From cfcc59ce355cd1ca60172e1790b56565f781ca05 Mon Sep 17 00:00:00 2001 From: Mike Alfare Date: Tue, 16 Jan 2024 18:02:42 -0500 Subject: [PATCH 001/114] template repo from dbt-adapters --- .changes/header.tpl.md | 6 + .changes/unreleased/.gitkeep | 0 .changie.yaml | 33 +++ .github/ISSUE_TEMPLATE/bug-report.yml | 81 ++++++++ .github/ISSUE_TEMPLATE/config.yml | 11 + .github/ISSUE_TEMPLATE/feature-request.yml | 59 ++++++ .github/ISSUE_TEMPLATE/regression-report.yml | 78 +++++++ .github/dependabot.yml | 8 + .github/pull_request_template.md | 35 ++++ .gitignore | 160 +++++++++++++++ CHANGELOG.md | 9 + CONTRIBUTING.md | 182 +++++++++++++++++ LICENSE | 201 +++++++++++++++++++ README.md | 39 ++++ pyproject.toml | 128 ++++++++++++ 15 files changed, 1030 insertions(+) create mode 100644 .changes/header.tpl.md create mode 100644 .changes/unreleased/.gitkeep create mode 100644 .changie.yaml create mode 100644 .github/ISSUE_TEMPLATE/bug-report.yml create mode 100644 .github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature-request.yml create mode 100644 .github/ISSUE_TEMPLATE/regression-report.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/pull_request_template.md create mode 100644 .gitignore create mode 100644 CHANGELOG.md create mode 100644 CONTRIBUTING.md create mode 100644 LICENSE create mode 100644 README.md create mode 100644 pyproject.toml diff --git a/.changes/header.tpl.md b/.changes/header.tpl.md new file mode 100644 index 000000000..df8faa7b2 --- /dev/null +++ b/.changes/header.tpl.md @@ -0,0 +1,6 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), +and is generated by [Changie](https://github.com/miniscruff/changie). diff --git a/.changes/unreleased/.gitkeep b/.changes/unreleased/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/.changie.yaml b/.changie.yaml new file mode 100644 index 000000000..1efbf5848 --- /dev/null +++ b/.changie.yaml @@ -0,0 +1,33 @@ +changesDir: .changes +unreleasedDir: unreleased +headerPath: header.tpl.md +changelogPath: CHANGELOG.md +versionExt: md +envPrefix: CHANGIE_ +versionFormat: '## dbt-postgres {{.Version}} - {{.Time.Format "January 02, 2006"}}' +kindFormat: '### {{.Kind}}' +changeFormat: '* {{.Body}}' +kinds: + - label: Breaking Changes + - label: Features + - label: Fixes + - label: Docs + - label: Under the Hood + - label: Dependencies + - label: Security +newlines: + afterChangelogHeader: 1 + afterKind: 1 + afterChangelogVersion: 1 + beforeKind: 1 + endOfVersion: 1 + +custom: +- key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 +- key: Issue + label: GitHub Issue Number (separated by a single space if multiple) + type: string + minLength: 1 diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 000000000..394de1f25 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,81 @@ +name: 🐞 Bug +description: Report a bug or an issue you've found with dbt-postgres +title: "[Bug] " +labels: ["bug", "triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + - type: checkboxes + attributes: + label: Is this a new bug? + description: > + In other words, is this an error, flaw, failure or fault in our software? + + If this is a bug that broke existing functionality that used to work, please open a regression issue. + If this is a bug experienced while using dbt Cloud, please report to [support](mailto:support@getdbt.com). + If this is a request for help or troubleshooting code in your own dbt project, please join our [dbt Community Slack](https://www.getdbt.com/community/join-the-community/) or open a [Discussion question](https://github.com/dbt-labs/docs.getdbt.com/discussions). + + Please search to see if an issue already exists for the bug you encountered. + options: + - label: I believe this is a new bug + required: true + - label: I have searched the existing issues, and I could not find an existing issue for this bug + required: true + - type: textarea + attributes: + label: Current Behavior + description: A concise description of what you're experiencing. + validations: + required: true + - type: textarea + attributes: + label: Expected Behavior + description: A concise description of what you expected to happen. + validations: + required: true + - type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior. + placeholder: | + 1. In this environment... + 2. With this config... + 3. Run '...' + 4. See error... + validations: + required: true + - type: textarea + id: logs + attributes: + label: Relevant log output + description: | + If applicable, log output to help explain your problem. + render: shell + validations: + required: false + - type: textarea + attributes: + label: Environment + description: | + examples: + - **OS**: Ubuntu 20.04 + - **Python**: 3.11.6 (`python3 --version`) + - **dbt-postgres**: 1.0.0 (`dbt --version`) + value: | + - OS: + - Python: + - dbt-postgres: + render: markdown + validations: + required: false + - type: textarea + attributes: + label: Additional Context + description: | + Links? References? Anything that will give us more context about the issue you are encountering! + + Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..8d0995f41 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,11 @@ +blank_issues_enabled: false +contact_links: + - name: Ask the community for help + url: https://github.com/dbt-labs/docs.getdbt.com/discussions + about: Need help troubleshooting? Check out our guide on how to ask + - name: Contact dbt Cloud support + url: mailto:support@getdbt.com + about: Are you using dbt Cloud? Contact our support team for help! + - name: Participate in Discussions + url: https://github.com/dbt-labs/dbt-postgres/discussions + about: Do you have a Big Idea for dbt-postgres? Read open discussions, or start a new one diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 000000000..edc32c6cb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,59 @@ +name: ✨ Feature +description: Propose a straightforward extension of dbt-postgres functionality +title: "[Feature] <title>" +labels: ["enhancement", "triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this feature request! + - type: checkboxes + attributes: + label: Is this your first time submitting a feature request? + description: > + We want to make sure that features are distinct and discoverable, + so that other members of the community can find them and offer their thoughts. + + Issues are the right place to request straightforward extensions of existing dbt-postgres functionality. + For "big ideas" about future capabilities of dbt-postgres, we ask that you open a + [discussion](https://github.com/dbt-labs/dbt-postgres/discussions/new?category=ideas) in the "Ideas" category instead. + options: + - label: I have read the [expectations for open source contributors](https://docs.getdbt.com/docs/contributing/oss-expectations) + required: true + - label: I have searched the existing issues, and I could not find an existing issue for this feature + required: true + - label: I am requesting a straightforward extension of existing dbt-postgres functionality, rather than a Big Idea better suited to a discussion + required: true + - type: textarea + attributes: + label: Describe the feature + description: A clear and concise description of what you want to happen. + validations: + required: true + - type: textarea + attributes: + label: Describe alternatives you've considered + description: | + A clear and concise description of any alternative solutions or features you've considered. + validations: + required: false + - type: textarea + attributes: + label: Who will this benefit? + description: | + What kind of use case will this feature be useful for? Please be specific and provide examples, this will help us prioritize properly. + validations: + required: false + - type: input + attributes: + label: Are you interested in contributing this feature? + description: Let us know if you want to write some code, and how we can help. + validations: + required: false + - type: textarea + attributes: + label: Anything else? + description: | + Links? References? Anything that will give us more context about the feature you are suggesting! + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/regression-report.yml b/.github/ISSUE_TEMPLATE/regression-report.yml new file mode 100644 index 000000000..73bfbe759 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/regression-report.yml @@ -0,0 +1,78 @@ +name: ☣️ Regression +description: Report a regression you've observed in a newer version of dbt-postgres +title: "[Regression] <title>" +labels: ["regression", "triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this regression report! + - type: checkboxes + attributes: + label: Is this a regression? + description: > + A regression is when documented functionality works as expected in an older version of the software + and no longer works after upgrading to a newer version the software + options: + - label: I believe this is a regression in functionality + required: true + - label: I have searched the existing issues, and I could not find an existing issue for this regression + required: true + - type: textarea + attributes: + label: Current Behavior + description: A concise description of what you're experiencing. + validations: + required: true + - type: textarea + attributes: + label: Expected/Previous Behavior + description: A concise description of what you expected to happen. + validations: + required: true + - type: textarea + attributes: + label: Steps To Reproduce + description: Steps to reproduce the behavior. + placeholder: | + 1. In this environment... + 2. With this config... + 3. Run '...' + 4. See error... + validations: + required: true + - type: textarea + id: logs + attributes: + label: Relevant log output + description: | + If applicable, log output to help explain your problem. + render: shell + validations: + required: false + - type: textarea + attributes: + label: Environment + description: | + examples: + - **OS**: Ubuntu 20.04 + - **Python**: 3.11.6 (`python3 --version`) + - **dbt-postgres (working version)**: 1.1.0 (`dbt --version`) + - **dbt-postgres (regression version)**: 1.2.0 (`dbt --version`) + value: | + - OS: + - Python: + - dbt-postgres (working version): + - dbt-postgres (regression version): + render: markdown + validations: + required: true + - type: textarea + attributes: + label: Additional Context + description: | + Links? References? Anything that will give us more context about the issue you are encountering! + + Tip: You can attach images or log files by clicking this area to highlight it and then dragging files in. + validations: + required: false diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..2a6f34492 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + # python dependencies + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + rebase-strategy: "disabled" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..26e27c4d6 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,35 @@ +resolves # +[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/# + +<!--- + Include the number of the issue addressed by this PR above if applicable. + PRs for code changes without an associated issue *will not be merged*. + See CONTRIBUTING.md for more information. + + Include the number of the docs issue that was opened for this PR. If + this change has no user-facing implications, "N/A" suffices instead. New + docs tickets can be created by clicking the link above or by going to + https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose. +--> + +### Problem + +<!--- + Describe the problem this PR is solving. What is the application state + before this PR is merged? +--> + +### Solution + +<!--- + Describe the way this PR solves the above problem. Add as much detail as you + can to help reviewers understand your changes. Include any alternatives and + tradeoffs you considered. +--> + +### Checklist + +- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-postgres/blob/main/CONTRIBUTING.md) and understand what's expected of me +- [ ] I have run this code in development and it appears to resolve the stated issue +- [ ] This PR includes tests, or tests are not required/relevant for this PR +- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..6769e21d9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,160 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..5ca9d33a1 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,9 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), +and is generated by [Changie](https://github.com/miniscruff/changie). + + +No releases yet, this file will be updated when generating your first release. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..9d2880e8a --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,182 @@ +# Contributing to `dbt-postgres` + +- [About this document](#about-this-document) +- [Getting the code](#getting-the-code) +- [Developing](#developing) +- [Testing](#testing) +- [Documentation](#documentation) +- [Submitting a pull request](#submitting-a-pull-request) + + +## About this document + +This document is a guide for anyone interested in contributing to `dbt-postgres`. +It outlines how to install `dbt-postgres` for development, +run tests locally, update documentation, and submit pull requests. +This guide assumes users are developing on a Linux or MacOS system. +The following utilities are needed or will be installed in this guide: + +- `pip` +- `virturalenv` +- `git` +- `changie` + +If local functional testing is required, then a database instance +and appropriate credentials are also required. + +In addition to this guide, users are highly encouraged to read the `dbt-core` +[CONTRIBUTING.md](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md). +Almost all information there is applicable here. + + +## Getting the code + +`git` is required to download, modify, and sync the `dbt-postgres` code. +There are several ways to install Git. For MacOS: + +- Install [Xcode](https://developer.apple.com/support/xcode/) +- Install [Xcode Command Line Tools](https://mac.install.guide/commandlinetools/index.html) + +### External contributors + +Contributors external to the `dbt-labs` GitHub organization can contribute to `dbt-postgres` +by forking the `dbt-postgres` repository. For more on forking, check out the +[GitHub docs on forking](https://help.github.com/en/articles/fork-a-repo). To contribute: + +1. Fork the `dbt-labs/dbt-postgres` repository (e.g. `{forked-org}/dbt-postgres`) +2. Clone `{forked-org}/dbt-postgres` locally +3. Check out a new branch locally +4. Make changes in the new branch +5. Push the new branch to `{forked-org}/dbt-postgres` +6. Open a pull request in `dbt-labs/dbt-postgres` to merge `{forked-org}/dbt-postgres/{new-branch}` into `main` + +### dbt Labs contributors + +Contributors in the `dbt Labs` GitHub organization have push access to the `dbt-postgres` repo. +Rather than forking `dbt-labs/dbt-postgres`, use `dbt-labs/dbt-postgres` directly. To contribute: + +1. Clone `dbt-labs/dbt-postgres` locally +2. Check out a new branch locally +3. Make changes in the new branch +4. Push the new branch to `dbt-labs/dbt-postgres` +5. Open a pull request in `dbt-labs/dbt-postgres` to merge `{new-branch}` into `main` + + +## Developing + +### Installation + +1. Ensure the latest version of `pip` is installed: + ```shell + pip install --upgrade pip + ``` +2. Configure and activate a virtual environment using `virtualenv` as described in +[Setting up an environment](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md#setting-up-an-environment) +3. Install `dbt-postgres` and development dependencies in the virtual environment + ```shell + pip install -e .[dev] + ``` + +When `dbt-postgres` is installed this way, any changes made to the `dbt-postgres` source code +will be reflected in the virtual environment immediately. + + +## Testing + +`dbt-postgres` contains [unit](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/unit) +and [functional](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/functional) tests. + +### Unit tests + +Unit tests can be run locally without setting up a database connection: + +```shell +# Note: replace $strings with valid names + +# run all unit tests in a module +python -m pytest tests/unit/$test_file_name.py +# run a specific unit test +python -m pytest tests/unit/$test_file_name.py::$test_class_name::$test_method_name +``` + +### Functional tests + +Functional tests require a database to test against. There are two primary ways to run functional tests: + +- Tests will run automatically against a dbt Labs owned database during PR checks +- Tests can be run locally by configuring a `test.env` file with appropriate `ENV` variables: + ```shell + cp test.env.example test.env + $EDITOR test.env + ``` + +> **_WARNING:_** The parameters in `test.env` must link to a valid database. +> `test.env` is git-ignored, but be _extra_ careful to never check in credentials +> or other sensitive information when developing. + +Functional tests can be run locally with a valid database connection configured in `test.env`: + +```shell +# Note: replace $strings with valid names + +# run all functional tests in a directory +python -m pytest tests/functional/$test_directory +# run all functional tests in a module +python -m pytest tests/functional/$test_dir_and_filename.py +# run all functional tests in a class +python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name +# run a specific functional test +python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name::$test__method_name +``` + + +## Documentation + +### User documentation + +Many changes will require an update to `dbt-postgres` user documentation. +All contributors, whether internal or external, are encouraged to open an issue or PR +in the docs repo when submitting user-facing changes. Here are some relevant links: + +- [User docs](https://docs.getdbt.com/) + - [Warehouse Profile](https://docs.getdbt.com/reference/warehouse-profiles/) + - [Resource Configs](https://docs.getdbt.com/reference/resource-configs/) +- [User docs repo](https://github.com/dbt-labs/docs.getdbt.com) + +### CHANGELOG entry + +`dbt-postgres` uses [changie](https://changie.dev) to generate `CHANGELOG` entries. +Follow the steps to [install `changie`](https://changie.dev/guide/installation/). + +Once changie is installed and the PR is created, run: + ```shell + changie new + ``` +`changie` will walk through the process of creating a changelog entry. +Remember to commit and push the file that's created. + +> **_NOTE:_** Do not edit the `CHANGELOG.md` directly. +> Any modifications will be lost by the consolidation process. + + +## Submitting a pull request + +### Signing the CLA + +> **_NOTE:_** All contributors to `dbt-postgres` must sign the +> [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements)(CLA). + +Maintainers will be unable to merge contributions until the contributor signs the CLA. +This is a one time requirement, not a per-PR requirement. +Even without a CLA, anyone is welcome to open issues and comment on existing issues or PRs. + +### Opening a pull request + +A `dbt-postgres` maintainer will be assigned to review each PR based on priority and capacity. +They may suggest code revisions for style and clarity or they may request additional tests. +These are good things! dbt Labs believes that contributing high-quality code is a collaborative effort. +The same process is followed whether the contributor is external or another `dbt-postgres` maintainer. +Once all tests are passing and the PR has been approved by the appropriate code owners, +a `dbt-postgres` maintainer will merge the changes into `main`. + +And that's it! Happy developing :tada: diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..10a79faeb --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2024 dbt Labs, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 000000000..1a127c6c5 --- /dev/null +++ b/README.md @@ -0,0 +1,39 @@ +# dbt Postgres + +This package is responsible for: + +- defining database connection methods +- caching information from databases +- determining how relations are defined + +There are two major adapter types: base and sql + +# Directories + +## `base` + +Defines the base implementation Adapters can use to build out full functionality. + +## `sql` + +Defines a sql implementation for adapters that initially inherits the base implementation +and comes with some pre-made methods and macros that can be overwritten as needed per adapter. +(most common type of adapter.) + +# Files + +## `cache.py` + +Cached information from the database. + +## `factory.py` + +Defines how we generate adapter objects + +## `protocol.py` + +Defines various interfaces for various adapter objects. Helps mypy correctly resolve methods. + +## `reference_keys.py` + +Configures naming scheme for cache elements to be universal. diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..ad9a25f93 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,128 @@ +[project] +dynamic = ["version"] +name = "dbt-postgres" +description = "The set of adapter protocols and base functionality that supports integration with dbt-core" +readme = "README.md" +keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "postgres"] +requires-python = ">=3.8.0" +authors = [ + { name = "dbt Labs", email = "info@dbtlabs.com" }, +] +maintainers = [ + { name = "dbt Labs", email = "info@dbtlabs.com" }, +] +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "License :: OSI Approved :: Apache Software License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +dependencies = [ + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", +] +[project.optional-dependencies] +lint = [ + "black", + "flake8", + "Flake8-pyproject", + "mypy", + "types-protobuf", + "types-pytz", +] +test = [ + "pytest", + "pytest-dotenv", + "pytest-xdist", +] + +[project.urls] +Homepage = "https://github.com/dbt-labs/dbt-postgres" +Documentation = "https://docs.getdbt.com" +Repository = "https://github.com/dbt-labs/dbt-postgres.git" +Issues = "https://github.com/dbt-labs/dbt-postgres/issues" +Changelog = "https://github.com/dbt-labs/dbt-postgres/blob/main/CHANGELOG.md" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.sdist] +exclude = [ + "/.github", + "/.changes", + ".changie.yaml", + ".gitignore", + ".pre-commit-config.yaml", + "CONTRIBUTING.md", + "/tests", +] + +[tool.hatch.build.targets.wheel] +packages = ["dbt.postgres"] + +[tool.hatch.metadata] +# needed for installing `dbt-common` directly from github +allow-direct-references = true + +[tool.hatch.version] +path = "dbt/postgres/__about__.py" + +[tool.hatch.envs.default] +features = ["lint", "test"] +[tool.hatch.envs.default.scripts] +unit-tests = "- python -m pytest {args:tests/unit}" +lint-all = [ + "- lint-black", + "- lint-flake8", + "- lint-mypy", +] +lint-black = "python -m black ." +lint-flake8 = "python -m flake8 ." +lint-mypy = "python -m mypy ." + +[tool.black] +extend-exclude = "dbt/postgres/events/adapter_types_pb2.py" +line-length = 99 +target-version = ['py38'] + +[tool.flake8] +select = ["E", "W", "F"] +ignore = ["E203", "E501", "E741", "W503", "W504"] +exclude = [ + "dbt/postgres/events/adapter_types_pb2.py", + "tests", + "venv", +] +per-file-ignores = ["*/__init__.py: F401"] + +[tool.mypy] +namespace_packages = true +show_error_codes = true +explicit_package_bases = true +ignore_missing_imports = true +pretty = true +mypy_path = "third-party-stubs/" +files = [ + "dbt/postgres", + "tests/unit", +] +exclude = [ + "dbt/postgres/events/adapter_types_pb2.py", + "venv", +] +[[tool.mypy.overrides]] +module = ["dbt.postgres.events.adapter_types_pb2"] +follow_imports = "skip" + +[tool.pytest] +env_files = ["test.env"] +testpaths = [ + "tests/functional", + "tests/unit", +] From c0f26019677e42784d9f10be158ef98f3f5df35b Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 16 Jan 2024 18:05:40 -0500 Subject: [PATCH 002/114] copy README.md from dbt-core/plugins/postgres --- README.md | 51 ++++++++++++++++++++++++--------------------------- 1 file changed, 24 insertions(+), 27 deletions(-) diff --git a/README.md b/README.md index 1a127c6c5..8c62c4eea 100644 --- a/README.md +++ b/README.md @@ -1,39 +1,36 @@ -# dbt Postgres +<p align="center"> + <img src="https://raw.githubusercontent.com/dbt-labs/dbt-postgres/ec7dee39f793aa4f7dd3dae37282cc87664813e4/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/> +</p> +<p align="center"> + <a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml"> + <img src="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml/badge.svg?event=push" alt="CI Badge"/> + </a> +</p> -This package is responsible for: +**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications. -- defining database connection methods -- caching information from databases -- determining how relations are defined +dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis. -There are two major adapter types: base and sql +## dbt-postgres -# Directories +The `dbt-postgres` package contains all of the code enabling dbt to work with a Postgres database. For +more information on using dbt with Postgres, consult [the docs](https://docs.getdbt.com/docs/profile-postgres). -## `base` +## Getting started -Defines the base implementation Adapters can use to build out full functionality. +- [Install dbt](https://docs.getdbt.com/docs/installation) +- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/) -## `sql` +## Join the dbt Community -Defines a sql implementation for adapters that initially inherits the base implementation -and comes with some pre-made methods and macros that can be overwritten as needed per adapter. -(most common type of adapter.) +- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/) +- Read more on the [dbt Community Discourse](https://discourse.getdbt.com) -# Files +## Reporting bugs and contributing code -## `cache.py` +- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-postgres/issues/new) +- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-postgres/blob/main/CONTRIBUTING.md) -Cached information from the database. +## Code of Conduct -## `factory.py` - -Defines how we generate adapter objects - -## `protocol.py` - -Defines various interfaces for various adapter objects. Helps mypy correctly resolve methods. - -## `reference_keys.py` - -Configures naming scheme for cache elements to be universal. +Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct). From 5449feb853c86cc9be691276cfccb8351ca3b105 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 16 Jan 2024 19:25:15 -0500 Subject: [PATCH 003/114] copy functional code from dbt-core/plugins/postgres --- dbt/__init__.py | 3 + dbt/adapters/postgres/__about__.py | 1 + dbt/adapters/postgres/__init__.py | 14 + dbt/adapters/postgres/column.py | 12 + dbt/adapters/postgres/connections.py | 207 ++++++++++++++ dbt/adapters/postgres/impl.py | 159 +++++++++++ dbt/adapters/postgres/relation.py | 103 +++++++ .../postgres/relation_configs/__init__.py | 11 + .../postgres/relation_configs/constants.py | 1 + .../postgres/relation_configs/index.py | 165 ++++++++++++ .../relation_configs/materialized_view.py | 112 ++++++++ dbt/include/postgres/__init__.py | 3 + dbt/include/postgres/dbt_project.yml | 5 + dbt/include/postgres/macros/adapters.sql | 254 ++++++++++++++++++ dbt/include/postgres/macros/catalog.sql | 67 +++++ .../incremental_strategies.sql | 9 + .../materializations/snapshot_merge.sql | 18 ++ dbt/include/postgres/macros/relations.sql | 80 ++++++ .../relations/materialized_view/alter.sql | 50 ++++ .../relations/materialized_view/create.sql | 8 + .../relations/materialized_view/describe.sql | 5 + .../relations/materialized_view/drop.sql | 3 + .../relations/materialized_view/refresh.sql | 3 + .../relations/materialized_view/rename.sql | 3 + .../postgres/macros/relations/table/drop.sql | 3 + .../macros/relations/table/rename.sql | 3 + .../macros/relations/table/replace.sql | 17 ++ .../postgres/macros/relations/view/drop.sql | 3 + .../postgres/macros/relations/view/rename.sql | 3 + .../macros/relations/view/replace.sql | 15 ++ dbt/include/postgres/macros/timestamps.sql | 20 ++ .../postgres/macros/utils/any_value.sql | 7 + .../macros/utils/columns_spec_ddl.sql | 0 dbt/include/postgres/macros/utils/dateadd.sql | 5 + .../postgres/macros/utils/datediff.sql | 32 +++ .../postgres/macros/utils/last_day.sql | 14 + dbt/include/postgres/macros/utils/listagg.sql | 23 ++ .../postgres/macros/utils/split_part.sql | 9 + dbt/include/postgres/profile_template.yml | 21 ++ dbt/include/postgres/sample_profiles.yml | 24 ++ pyproject.toml | 39 +-- 41 files changed, 1506 insertions(+), 28 deletions(-) create mode 100644 dbt/__init__.py create mode 100644 dbt/adapters/postgres/__about__.py create mode 100644 dbt/adapters/postgres/__init__.py create mode 100644 dbt/adapters/postgres/column.py create mode 100644 dbt/adapters/postgres/connections.py create mode 100644 dbt/adapters/postgres/impl.py create mode 100644 dbt/adapters/postgres/relation.py create mode 100644 dbt/adapters/postgres/relation_configs/__init__.py create mode 100644 dbt/adapters/postgres/relation_configs/constants.py create mode 100644 dbt/adapters/postgres/relation_configs/index.py create mode 100644 dbt/adapters/postgres/relation_configs/materialized_view.py create mode 100644 dbt/include/postgres/__init__.py create mode 100644 dbt/include/postgres/dbt_project.yml create mode 100644 dbt/include/postgres/macros/adapters.sql create mode 100644 dbt/include/postgres/macros/catalog.sql create mode 100644 dbt/include/postgres/macros/materializations/incremental_strategies.sql create mode 100644 dbt/include/postgres/macros/materializations/snapshot_merge.sql create mode 100644 dbt/include/postgres/macros/relations.sql create mode 100644 dbt/include/postgres/macros/relations/materialized_view/alter.sql create mode 100644 dbt/include/postgres/macros/relations/materialized_view/create.sql create mode 100644 dbt/include/postgres/macros/relations/materialized_view/describe.sql create mode 100644 dbt/include/postgres/macros/relations/materialized_view/drop.sql create mode 100644 dbt/include/postgres/macros/relations/materialized_view/refresh.sql create mode 100644 dbt/include/postgres/macros/relations/materialized_view/rename.sql create mode 100644 dbt/include/postgres/macros/relations/table/drop.sql create mode 100644 dbt/include/postgres/macros/relations/table/rename.sql create mode 100644 dbt/include/postgres/macros/relations/table/replace.sql create mode 100644 dbt/include/postgres/macros/relations/view/drop.sql create mode 100644 dbt/include/postgres/macros/relations/view/rename.sql create mode 100644 dbt/include/postgres/macros/relations/view/replace.sql create mode 100644 dbt/include/postgres/macros/timestamps.sql create mode 100644 dbt/include/postgres/macros/utils/any_value.sql create mode 100644 dbt/include/postgres/macros/utils/columns_spec_ddl.sql create mode 100644 dbt/include/postgres/macros/utils/dateadd.sql create mode 100644 dbt/include/postgres/macros/utils/datediff.sql create mode 100644 dbt/include/postgres/macros/utils/last_day.sql create mode 100644 dbt/include/postgres/macros/utils/listagg.sql create mode 100644 dbt/include/postgres/macros/utils/split_part.sql create mode 100644 dbt/include/postgres/profile_template.yml create mode 100644 dbt/include/postgres/sample_profiles.yml diff --git a/dbt/__init__.py b/dbt/__init__.py new file mode 100644 index 000000000..b36383a61 --- /dev/null +++ b/dbt/__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/dbt/adapters/postgres/__about__.py b/dbt/adapters/postgres/__about__.py new file mode 100644 index 000000000..f15b401d1 --- /dev/null +++ b/dbt/adapters/postgres/__about__.py @@ -0,0 +1 @@ +version = "1.8.0a1" diff --git a/dbt/adapters/postgres/__init__.py b/dbt/adapters/postgres/__init__.py new file mode 100644 index 000000000..1fa95c820 --- /dev/null +++ b/dbt/adapters/postgres/__init__.py @@ -0,0 +1,14 @@ +from dbt.adapters.base import AdapterPlugin + +from dbt.adapters.postgres.column import PostgresColumn +from dbt.adapters.postgres.connections import PostgresConnectionManager, PostgresCredentials +from dbt.adapters.postgres.impl import PostgresAdapter +from dbt.adapters.postgres.relation import PostgresRelation +from dbt.include import postgres + + +Plugin = AdapterPlugin( + adapter=PostgresAdapter, + credentials=PostgresCredentials, + include_path=postgres.PACKAGE_PATH, +) diff --git a/dbt/adapters/postgres/column.py b/dbt/adapters/postgres/column.py new file mode 100644 index 000000000..686ec0cb8 --- /dev/null +++ b/dbt/adapters/postgres/column.py @@ -0,0 +1,12 @@ +from dbt.adapters.base import Column + + +class PostgresColumn(Column): + @property + def data_type(self): + # on postgres, do not convert 'text' or 'varchar' to 'varchar()' + if self.dtype.lower() == "text" or ( + self.dtype.lower() == "character varying" and self.char_size is None + ): + return self.dtype + return super().data_type diff --git a/dbt/adapters/postgres/connections.py b/dbt/adapters/postgres/connections.py new file mode 100644 index 000000000..d573f53bf --- /dev/null +++ b/dbt/adapters/postgres/connections.py @@ -0,0 +1,207 @@ +from contextlib import contextmanager +from dataclasses import dataclass +from typing import Optional + +from dbt.adapters.base import Credentials +from dbt.adapters.contracts.connection import AdapterResponse +from dbt.adapters.events.logging import AdapterLogger +from dbt.adapters.sql import SQLConnectionManager +from dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError +from dbt_common.helper_types import Port +from mashumaro.jsonschema.annotations import Maximum, Minimum +import psycopg2 +from typing_extensions import Annotated + + +logger = AdapterLogger("Postgres") + + +@dataclass +class PostgresCredentials(Credentials): + host: str + user: str + # Annotated is used by mashumaro for jsonschema generation + port: Annotated[Port, Minimum(0), Maximum(65535)] + password: str # on postgres the password is mandatory + connect_timeout: int = 10 + role: Optional[str] = None + search_path: Optional[str] = None + keepalives_idle: int = 0 # 0 means to use the default value + sslmode: Optional[str] = None + sslcert: Optional[str] = None + sslkey: Optional[str] = None + sslrootcert: Optional[str] = None + application_name: Optional[str] = "dbt" + retries: int = 1 + + _ALIASES = {"dbname": "database", "pass": "password"} + + @property + def type(self): + return "postgres" + + @property + def unique_field(self): + return self.host + + def _connection_keys(self): + return ( + "host", + "port", + "user", + "database", + "schema", + "connect_timeout", + "role", + "search_path", + "keepalives_idle", + "sslmode", + "sslcert", + "sslkey", + "sslrootcert", + "application_name", + "retries", + ) + + +class PostgresConnectionManager(SQLConnectionManager): + TYPE = "postgres" + + @contextmanager + def exception_handler(self, sql): + try: + yield + + except psycopg2.DatabaseError as e: + logger.debug("Postgres error: {}".format(str(e))) + + try: + self.rollback_if_open() + except psycopg2.Error: + logger.debug("Failed to release connection!") + pass + + raise DbtDatabaseError(str(e).strip()) from e + + except Exception as e: + logger.debug("Error running SQL: {}", sql) + logger.debug("Rolling back transaction.") + self.rollback_if_open() + if isinstance(e, DbtRuntimeError): + # during a sql query, an internal to dbt exception was raised. + # this sounds a lot like a signal handler and probably has + # useful information, so raise it without modification. + raise + + raise DbtRuntimeError(e) from e + + @classmethod + def open(cls, connection): + if connection.state == "open": + logger.debug("Connection is already open, skipping open.") + return connection + + credentials = cls.get_credentials(connection.credentials) + kwargs = {} + # we don't want to pass 0 along to connect() as postgres will try to + # call an invalid setsockopt() call (contrary to the docs). + if credentials.keepalives_idle: + kwargs["keepalives_idle"] = credentials.keepalives_idle + + # psycopg2 doesn't support search_path officially, + # see https://github.com/psycopg/psycopg2/issues/465 + search_path = credentials.search_path + if search_path is not None and search_path != "": + # see https://postgresql.org/docs/9.5/libpq-connect.html + kwargs["options"] = "-c search_path={}".format(search_path.replace(" ", "\\ ")) + + if credentials.sslmode: + kwargs["sslmode"] = credentials.sslmode + + if credentials.sslcert is not None: + kwargs["sslcert"] = credentials.sslcert + + if credentials.sslkey is not None: + kwargs["sslkey"] = credentials.sslkey + + if credentials.sslrootcert is not None: + kwargs["sslrootcert"] = credentials.sslrootcert + + if credentials.application_name: + kwargs["application_name"] = credentials.application_name + + def connect(): + handle = psycopg2.connect( + dbname=credentials.database, + user=credentials.user, + host=credentials.host, + password=credentials.password, + port=credentials.port, + connect_timeout=credentials.connect_timeout, + **kwargs, + ) + if credentials.role: + handle.cursor().execute("set role {}".format(credentials.role)) + return handle + + retryable_exceptions = [ + # OperationalError is subclassed by all psycopg2 Connection Exceptions and it's raised + # by generic connection timeouts without an error code. This is a limitation of + # psycopg2 which doesn't provide subclasses for errors without a SQLSTATE error code. + # The limitation has been known for a while and there are no efforts to tackle it. + # See: https://github.com/psycopg/psycopg2/issues/682 + psycopg2.errors.OperationalError, + ] + + def exponential_backoff(attempt: int): + return attempt * attempt + + return cls.retry_connection( + connection, + connect=connect, + logger=logger, + retry_limit=credentials.retries, + retry_timeout=exponential_backoff, + retryable_exceptions=retryable_exceptions, + ) + + def cancel(self, connection): + connection_name = connection.name + try: + pid = connection.handle.get_backend_pid() + except psycopg2.InterfaceError as exc: + # if the connection is already closed, not much to cancel! + if "already closed" in str(exc): + logger.debug(f"Connection {connection_name} was already closed") + return + # probably bad, re-raise it + raise + + sql = "select pg_terminate_backend({})".format(pid) + + logger.debug("Cancelling query '{}' ({})".format(connection_name, pid)) + + _, cursor = self.add_query(sql) + res = cursor.fetchone() + + logger.debug("Cancel query '{}': {}".format(connection_name, res)) + + @classmethod + def get_credentials(cls, credentials): + return credentials + + @classmethod + def get_response(cls, cursor) -> AdapterResponse: + message = str(cursor.statusmessage) + rows = cursor.rowcount + status_message_parts = message.split() if message is not None else [] + status_messsage_strings = [part for part in status_message_parts if not part.isdigit()] + code = " ".join(status_messsage_strings) + return AdapterResponse(_message=message, code=code, rows_affected=rows) + + @classmethod + def data_type_code_to_name(cls, type_code: int) -> str: + if type_code in psycopg2.extensions.string_types: + return psycopg2.extensions.string_types[type_code].name + else: + return f"unknown type_code {type_code}" diff --git a/dbt/adapters/postgres/impl.py b/dbt/adapters/postgres/impl.py new file mode 100644 index 000000000..d4602f7c8 --- /dev/null +++ b/dbt/adapters/postgres/impl.py @@ -0,0 +1,159 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Any, List, Optional, Set + +from dbt.adapters.base import AdapterConfig, ConstraintSupport, available +from dbt.adapters.capability import ( + Capability, + CapabilityDict, + CapabilitySupport, + Support, +) +from dbt.adapters.exceptions import ( + CrossDbReferenceProhibitedError, + IndexConfigError, + IndexConfigNotDictError, + UnexpectedDbReferenceError, +) +from dbt.adapters.sql import SQLAdapter +from dbt_common.contracts.constraints import ConstraintType +from dbt_common.dataclass_schema import ValidationError, dbtClassMixin +from dbt_common.exceptions import DbtRuntimeError +from dbt_common.utils import encoding as dbt_encoding + +from dbt.adapters.postgres import ( + PostgresColumn, + PostgresConnectionManager, + PostgresRelation, +) + + +GET_RELATIONS_MACRO_NAME = "postgres__get_relations" + + +@dataclass +class PostgresIndexConfig(dbtClassMixin): + columns: List[str] + unique: bool = False + type: Optional[str] = None + + def render(self, relation): + # We append the current timestamp to the index name because otherwise + # the index will only be created on every other run. See + # https://github.com/dbt-labs/dbt-core/issues/1945#issuecomment-576714925 + # for an explanation. + now = datetime.utcnow().isoformat() + inputs = self.columns + [relation.render(), str(self.unique), str(self.type), now] + string = "_".join(inputs) + return dbt_encoding.md5(string) + + @classmethod + def parse(cls, raw_index) -> Optional["PostgresIndexConfig"]: + if raw_index is None: + return None + try: + cls.validate(raw_index) + return cls.from_dict(raw_index) + except ValidationError as exc: + raise IndexConfigError(exc) + except TypeError: + raise IndexConfigNotDictError(raw_index) + + +@dataclass +class PostgresConfig(AdapterConfig): + unlogged: Optional[bool] = None + indexes: Optional[List[PostgresIndexConfig]] = None + + +class PostgresAdapter(SQLAdapter): + Relation = PostgresRelation + ConnectionManager = PostgresConnectionManager + Column = PostgresColumn + + AdapterSpecificConfigs = PostgresConfig + + CONSTRAINT_SUPPORT = { + ConstraintType.check: ConstraintSupport.ENFORCED, + ConstraintType.not_null: ConstraintSupport.ENFORCED, + ConstraintType.unique: ConstraintSupport.ENFORCED, + ConstraintType.primary_key: ConstraintSupport.ENFORCED, + ConstraintType.foreign_key: ConstraintSupport.ENFORCED, + } + + CATALOG_BY_RELATION_SUPPORT = True + + _capabilities: CapabilityDict = CapabilityDict( + {Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full)} + ) + + @classmethod + def date_function(cls): + return "now()" + + @available + def verify_database(self, database): + if database.startswith('"'): + database = database.strip('"') + expected = self.config.credentials.database + if database.lower() != expected.lower(): + raise UnexpectedDbReferenceError(self.type(), database, expected) + # return an empty string on success so macros can call this + return "" + + @available + def parse_index(self, raw_index: Any) -> Optional[PostgresIndexConfig]: + return PostgresIndexConfig.parse(raw_index) + + def _link_cached_database_relations(self, schemas: Set[str]): + """ + :param schemas: The set of schemas that should have links added. + """ + database = self.config.credentials.database + table = self.execute_macro(GET_RELATIONS_MACRO_NAME) + + for (dep_schema, dep_name, refed_schema, refed_name) in table: + dependent = self.Relation.create( + database=database, schema=dep_schema, identifier=dep_name + ) + referenced = self.Relation.create( + database=database, schema=refed_schema, identifier=refed_name + ) + + # don't record in cache if this relation isn't in a relevant + # schema + if refed_schema.lower() in schemas: + self.cache.add_link(referenced, dependent) + + def _get_catalog_schemas(self, manifest): + # postgres only allow one database (the main one) + schema_search_map = super()._get_catalog_schemas(manifest) + try: + return schema_search_map.flatten() + except DbtRuntimeError as exc: + raise CrossDbReferenceProhibitedError(self.type(), exc.msg) + + def _link_cached_relations(self, manifest): + schemas: Set[str] = set() + relations_schemas = self._get_cache_schemas(manifest) + for relation in relations_schemas: + self.verify_database(relation.database) + schemas.add(relation.schema.lower()) + + self._link_cached_database_relations(schemas) + + def _relations_cache_for_schemas(self, manifest, cache_schemas=None): + super()._relations_cache_for_schemas(manifest, cache_schemas) + self._link_cached_relations(manifest) + + def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: + return f"{add_to} + interval '{number} {interval}'" + + def valid_incremental_strategies(self): + """The set of standard builtin strategies which this adapter supports out-of-the-box. + Not used to validate custom strategies defined by end users. + """ + return ["append", "delete+insert", "merge"] + + def debug_query(self): + self.execute("select 1 as id") diff --git a/dbt/adapters/postgres/relation.py b/dbt/adapters/postgres/relation.py new file mode 100644 index 000000000..7cb31827e --- /dev/null +++ b/dbt/adapters/postgres/relation.py @@ -0,0 +1,103 @@ +from dataclasses import dataclass +from typing import FrozenSet, Optional, Set + +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.contracts.relation import RelationConfig, RelationType +from dbt.adapters.relation_configs import ( + RelationConfigChangeAction, + RelationResults, +) +from dbt_common.exceptions import DbtRuntimeError + +from dbt.adapters.postgres.relation_configs import ( + MAX_CHARACTERS_IN_IDENTIFIER, + PostgresIndexConfig, + PostgresIndexConfigChange, + PostgresMaterializedViewConfig, + PostgresMaterializedViewConfigChangeCollection, +) + + +@dataclass(frozen=True, eq=False, repr=False) +class PostgresRelation(BaseRelation): + renameable_relations = frozenset( + { + RelationType.View, + RelationType.Table, + RelationType.MaterializedView, + } + ) + replaceable_relations = frozenset( + { + RelationType.View, + RelationType.Table, + } + ) + + def __post_init__(self): + # Check for length of Postgres table/view names. + # Check self.type to exclude test relation identifiers + if ( + self.identifier is not None + and self.type is not None + and len(self.identifier) > self.relation_max_name_length() + ): + raise DbtRuntimeError( + f"Relation name '{self.identifier}' " + f"is longer than {self.relation_max_name_length()} characters" + ) + + def relation_max_name_length(self): + return MAX_CHARACTERS_IN_IDENTIFIER + + def get_materialized_view_config_change_collection( + self, relation_results: RelationResults, relation_config: RelationConfig + ) -> Optional[PostgresMaterializedViewConfigChangeCollection]: + config_change_collection = PostgresMaterializedViewConfigChangeCollection() + + existing_materialized_view = PostgresMaterializedViewConfig.from_relation_results( + relation_results + ) + new_materialized_view = PostgresMaterializedViewConfig.from_config(relation_config) + + config_change_collection.indexes = self._get_index_config_changes( + existing_materialized_view.indexes, new_materialized_view.indexes + ) + + # we return `None` instead of an empty `PostgresMaterializedViewConfigChangeCollection` object + # so that it's easier and more extensible to check in the materialization: + # `core/../materializations/materialized_view.sql` : + # {% if configuration_changes is none %} + if config_change_collection.has_changes: + return config_change_collection + + def _get_index_config_changes( + self, + existing_indexes: FrozenSet[PostgresIndexConfig], + new_indexes: FrozenSet[PostgresIndexConfig], + ) -> Set[PostgresIndexConfigChange]: + """ + Get the index updates that will occur as a result of a new run + + There are four scenarios: + + 1. Indexes are equal -> don't return these + 2. Index is new -> create these + 3. Index is old -> drop these + 4. Indexes are not equal -> drop old, create new -> two actions + + Returns: a set of index updates in the form {"action": "drop/create", "context": <IndexConfig>} + """ + drop_changes = set( + PostgresIndexConfigChange.from_dict( + {"action": RelationConfigChangeAction.drop, "context": index} + ) + for index in existing_indexes.difference(new_indexes) + ) + create_changes = set( + PostgresIndexConfigChange.from_dict( + {"action": RelationConfigChangeAction.create, "context": index} + ) + for index in new_indexes.difference(existing_indexes) + ) + return set().union(drop_changes, create_changes) diff --git a/dbt/adapters/postgres/relation_configs/__init__.py b/dbt/adapters/postgres/relation_configs/__init__.py new file mode 100644 index 000000000..d005550c6 --- /dev/null +++ b/dbt/adapters/postgres/relation_configs/__init__.py @@ -0,0 +1,11 @@ +from dbt.adapters.postgres.relation_configs.constants import ( + MAX_CHARACTERS_IN_IDENTIFIER, +) +from dbt.adapters.postgres.relation_configs.index import ( + PostgresIndexConfig, + PostgresIndexConfigChange, +) +from dbt.adapters.postgres.relation_configs.materialized_view import ( + PostgresMaterializedViewConfig, + PostgresMaterializedViewConfigChangeCollection, +) diff --git a/dbt/adapters/postgres/relation_configs/constants.py b/dbt/adapters/postgres/relation_configs/constants.py new file mode 100644 index 000000000..9228df230 --- /dev/null +++ b/dbt/adapters/postgres/relation_configs/constants.py @@ -0,0 +1 @@ +MAX_CHARACTERS_IN_IDENTIFIER = 63 diff --git a/dbt/adapters/postgres/relation_configs/index.py b/dbt/adapters/postgres/relation_configs/index.py new file mode 100644 index 000000000..ba0a9ce12 --- /dev/null +++ b/dbt/adapters/postgres/relation_configs/index.py @@ -0,0 +1,165 @@ +from dataclasses import dataclass, field +from typing import Set, FrozenSet + +import agate +from dbt_common.dataclass_schema import StrEnum +from dbt_common.exceptions import DbtRuntimeError +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationConfigValidationMixin, + RelationConfigValidationRule, + RelationConfigChangeAction, + RelationConfigChange, +) + + +class PostgresIndexMethod(StrEnum): + btree = "btree" + hash = "hash" + gist = "gist" + spgist = "spgist" + gin = "gin" + brin = "brin" + + @classmethod + def default(cls) -> "PostgresIndexMethod": + return cls.btree + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class PostgresIndexConfig(RelationConfigBase, RelationConfigValidationMixin): + """ + This config fallows the specs found here: + https://www.postgresql.org/docs/current/sql-createindex.html + + The following parameters are configurable by dbt: + - name: the name of the index in the database, this isn't predictable since we apply a timestamp + - unique: checks for duplicate values when the index is created and on data updates + - method: the index method to be used + - column_names: the columns in the index + + Applicable defaults for non-configurable parameters: + - concurrently: `False` + - nulls_distinct: `True` + """ + + name: str = field(default=None, hash=False, compare=False) + column_names: FrozenSet[str] = field(default_factory=frozenset, hash=True) + unique: bool = field(default=False, hash=True) + method: PostgresIndexMethod = field(default=PostgresIndexMethod.default(), hash=True) + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + validation_check=self.column_names is not None, + validation_error=DbtRuntimeError( + "Indexes require at least one column, but none were provided" + ), + ), + } + + @classmethod + def from_dict(cls, config_dict) -> "PostgresIndexConfig": + # TODO: include the QuotePolicy instead of defaulting to lower() + kwargs_dict = { + "name": config_dict.get("name"), + "column_names": frozenset( + column.lower() for column in config_dict.get("column_names", set()) + ), + "unique": config_dict.get("unique"), + "method": config_dict.get("method"), + } + index: "PostgresIndexConfig" = super().from_dict(kwargs_dict) # type: ignore + return index + + @classmethod + def parse_model_node(cls, model_node_entry: dict) -> dict: + config_dict = { + "column_names": set(model_node_entry.get("columns", set())), + "unique": model_node_entry.get("unique"), + "method": model_node_entry.get("type"), + } + return config_dict + + @classmethod + def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: + config_dict = { + "name": relation_results_entry.get("name"), + "column_names": set(relation_results_entry.get("column_names", "").split(",")), + "unique": relation_results_entry.get("unique"), + "method": relation_results_entry.get("method"), + } + return config_dict + + @property + def as_node_config(self) -> dict: + """ + Returns: a dictionary that can be passed into `get_create_index_sql()` + """ + node_config = { + "columns": list(self.column_names), + "unique": self.unique, + "type": self.method.value, + } + return node_config + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class PostgresIndexConfigChange(RelationConfigChange, RelationConfigValidationMixin): + """ + Example of an index change: + { + "action": "create", + "context": { + "name": "", # we don't know the name since it gets created as a hash at runtime + "columns": ["column_1", "column_3"], + "type": "hash", + "unique": True + } + }, + { + "action": "drop", + "context": { + "name": "index_abc", # we only need this to drop, but we need the rest to compare + "columns": ["column_1"], + "type": "btree", + "unique": True + } + } + """ + + context: PostgresIndexConfig + + @property + def requires_full_refresh(self) -> bool: + return False + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + return { + RelationConfigValidationRule( + validation_check=self.action + in {RelationConfigChangeAction.create, RelationConfigChangeAction.drop}, + validation_error=DbtRuntimeError( + "Invalid operation, only `drop` and `create` changes are supported for indexes." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.action == RelationConfigChangeAction.drop and self.context.name is None + ), + validation_error=DbtRuntimeError( + "Invalid operation, attempting to drop an index with no name." + ), + ), + RelationConfigValidationRule( + validation_check=not ( + self.action == RelationConfigChangeAction.create + and self.context.column_names == set() + ), + validation_error=DbtRuntimeError( + "Invalid operations, attempting to create an index with no columns." + ), + ), + } diff --git a/dbt/adapters/postgres/relation_configs/materialized_view.py b/dbt/adapters/postgres/relation_configs/materialized_view.py new file mode 100644 index 000000000..af670c598 --- /dev/null +++ b/dbt/adapters/postgres/relation_configs/materialized_view.py @@ -0,0 +1,112 @@ +from dataclasses import dataclass, field +from typing import Set, FrozenSet, List, Dict +from typing_extensions import Self + +import agate +from dbt.adapters.contracts.relation import RelationConfig +from dbt.adapters.relation_configs import ( + RelationConfigBase, + RelationConfigValidationMixin, + RelationConfigValidationRule, + RelationResults, +) +from dbt_common.exceptions import DbtRuntimeError + +from dbt.adapters.postgres.relation_configs.constants import MAX_CHARACTERS_IN_IDENTIFIER +from dbt.adapters.postgres.relation_configs.index import ( + PostgresIndexConfig, + PostgresIndexConfigChange, +) + + +@dataclass(frozen=True, eq=True, unsafe_hash=True) +class PostgresMaterializedViewConfig(RelationConfigBase, RelationConfigValidationMixin): + """ + This config follows the specs found here: + https://www.postgresql.org/docs/current/sql-creatematerializedview.html + + The following parameters are configurable by dbt: + - table_name: name of the materialized view + - query: the query that defines the view + - indexes: the collection (set) of indexes on the materialized view + + Applicable defaults for non-configurable parameters: + - method: `heap` + - tablespace_name: `default_tablespace` + - with_data: `True` + """ + + table_name: str = "" + query: str = "" + indexes: FrozenSet[PostgresIndexConfig] = field(default_factory=frozenset) + + @property + def validation_rules(self) -> Set[RelationConfigValidationRule]: + # index rules get run by default with the mixin + return { + RelationConfigValidationRule( + validation_check=self.table_name is None + or len(self.table_name) <= MAX_CHARACTERS_IN_IDENTIFIER, + validation_error=DbtRuntimeError( + f"The materialized view name is more than {MAX_CHARACTERS_IN_IDENTIFIER} " + f"characters: {self.table_name}" + ), + ), + } + + @classmethod + def from_dict(cls, config_dict: dict) -> Self: + kwargs_dict = { + "table_name": config_dict.get("table_name"), + "query": config_dict.get("query"), + "indexes": frozenset( + PostgresIndexConfig.from_dict(index) for index in config_dict.get("indexes", {}) + ), + } + materialized_view: Self = super().from_dict(kwargs_dict) # type: ignore + return materialized_view + + @classmethod + def from_config(cls, relation_config: RelationConfig) -> Self: + materialized_view_config = cls.parse_config(relation_config) + materialized_view = cls.from_dict(materialized_view_config) + return materialized_view + + @classmethod + def parse_config(cls, relation_config: RelationConfig) -> Dict: + indexes: List[dict] = relation_config.config.extra.get("indexes", []) + config_dict = { + "table_name": relation_config.identifier, + "query": relation_config.compiled_code, + "indexes": [PostgresIndexConfig.parse_model_node(index) for index in indexes], + } + return config_dict + + @classmethod + def from_relation_results(cls, relation_results: RelationResults) -> Self: + materialized_view_config = cls.parse_relation_results(relation_results) + materialized_view = cls.from_dict(materialized_view_config) + return materialized_view + + @classmethod + def parse_relation_results(cls, relation_results: RelationResults) -> dict: + indexes: agate.Table = relation_results.get("indexes", agate.Table(rows={})) + config_dict = { + "indexes": [ + PostgresIndexConfig.parse_relation_results(index) for index in indexes.rows + ], + } + return config_dict + + +@dataclass +class PostgresMaterializedViewConfigChangeCollection: + indexes: Set[PostgresIndexConfigChange] = field(default_factory=set) + + @property + def requires_full_refresh(self) -> bool: + return any(index.requires_full_refresh for index in self.indexes) + + @property + def has_changes(self) -> bool: + return self.indexes != set() diff --git a/dbt/include/postgres/__init__.py b/dbt/include/postgres/__init__.py new file mode 100644 index 000000000..b177e5d49 --- /dev/null +++ b/dbt/include/postgres/__init__.py @@ -0,0 +1,3 @@ +import os + +PACKAGE_PATH = os.path.dirname(__file__) diff --git a/dbt/include/postgres/dbt_project.yml b/dbt/include/postgres/dbt_project.yml new file mode 100644 index 000000000..081149f6f --- /dev/null +++ b/dbt/include/postgres/dbt_project.yml @@ -0,0 +1,5 @@ +config-version: 2 +name: dbt_postgres +version: 1.0 + +macro-paths: ["macros"] diff --git a/dbt/include/postgres/macros/adapters.sql b/dbt/include/postgres/macros/adapters.sql new file mode 100644 index 000000000..ee864e9b7 --- /dev/null +++ b/dbt/include/postgres/macros/adapters.sql @@ -0,0 +1,254 @@ +{% macro postgres__create_table_as(temporary, relation, sql) -%} + {%- set unlogged = config.get('unlogged', default=false) -%} + {%- set sql_header = config.get('sql_header', none) -%} + + {{ sql_header if sql_header is not none }} + + create {% if temporary -%} + temporary + {%- elif unlogged -%} + unlogged + {%- endif %} table {{ relation }} + {% set contract_config = config.get('contract') %} + {% if contract_config.enforced %} + {{ get_assert_columns_equivalent(sql) }} + {% endif -%} + {% if contract_config.enforced and (not temporary) -%} + {{ get_table_columns_and_constraints() }} ; + insert into {{ relation }} ( + {{ adapter.dispatch('get_column_names', 'dbt')() }} + ) + {%- set sql = get_select_subquery(sql) %} + {% else %} + as + {% endif %} + ( + {{ sql }} + ); +{%- endmacro %} + +{% macro postgres__get_create_index_sql(relation, index_dict) -%} + {%- set index_config = adapter.parse_index(index_dict) -%} + {%- set comma_separated_columns = ", ".join(index_config.columns) -%} + {%- set index_name = index_config.render(relation) -%} + + create {% if index_config.unique -%} + unique + {%- endif %} index if not exists + "{{ index_name }}" + on {{ relation }} {% if index_config.type -%} + using {{ index_config.type }} + {%- endif %} + ({{ comma_separated_columns }}); +{%- endmacro %} + +{% macro postgres__create_schema(relation) -%} + {% if relation.database -%} + {{ adapter.verify_database(relation.database) }} + {%- endif -%} + {%- call statement('create_schema') -%} + create schema if not exists {{ relation.without_identifier().include(database=False) }} + {%- endcall -%} +{% endmacro %} + +{% macro postgres__drop_schema(relation) -%} + {% if relation.database -%} + {{ adapter.verify_database(relation.database) }} + {%- endif -%} + {%- call statement('drop_schema') -%} + drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade + {%- endcall -%} +{% endmacro %} + +{% macro postgres__get_columns_in_relation(relation) -%} + {% call statement('get_columns_in_relation', fetch_result=True) %} + select + column_name, + data_type, + character_maximum_length, + numeric_precision, + numeric_scale + + from {{ relation.information_schema('columns') }} + where table_name = '{{ relation.identifier }}' + {% if relation.schema %} + and table_schema = '{{ relation.schema }}' + {% endif %} + order by ordinal_position + + {% endcall %} + {% set table = load_result('get_columns_in_relation').table %} + {{ return(sql_convert_columns_in_relation(table)) }} +{% endmacro %} + + +{% macro postgres__list_relations_without_caching(schema_relation) %} + {% call statement('list_relations_without_caching', fetch_result=True) -%} + select + '{{ schema_relation.database }}' as database, + tablename as name, + schemaname as schema, + 'table' as type + from pg_tables + where schemaname ilike '{{ schema_relation.schema }}' + union all + select + '{{ schema_relation.database }}' as database, + viewname as name, + schemaname as schema, + 'view' as type + from pg_views + where schemaname ilike '{{ schema_relation.schema }}' + union all + select + '{{ schema_relation.database }}' as database, + matviewname as name, + schemaname as schema, + 'materialized_view' as type + from pg_matviews + where schemaname ilike '{{ schema_relation.schema }}' + {% endcall %} + {{ return(load_result('list_relations_without_caching').table) }} +{% endmacro %} + +{% macro postgres__information_schema_name(database) -%} + {% if database_name -%} + {{ adapter.verify_database(database_name) }} + {%- endif -%} + information_schema +{%- endmacro %} + +{% macro postgres__list_schemas(database) %} + {% if database -%} + {{ adapter.verify_database(database) }} + {%- endif -%} + {% call statement('list_schemas', fetch_result=True, auto_begin=False) %} + select distinct nspname from pg_namespace + {% endcall %} + {{ return(load_result('list_schemas').table) }} +{% endmacro %} + +{% macro postgres__check_schema_exists(information_schema, schema) -%} + {% if information_schema.database -%} + {{ adapter.verify_database(information_schema.database) }} + {%- endif -%} + {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %} + select count(*) from pg_namespace where nspname = '{{ schema }}' + {% endcall %} + {{ return(load_result('check_schema_exists').table) }} +{% endmacro %} + +{# + Postgres tables have a maximum length of 63 characters, anything longer is silently truncated. + Temp and backup relations add a lot of extra characters to the end of table names to ensure uniqueness. + To prevent this going over the character limit, the base_relation name is truncated to ensure + that name + suffix + uniquestring is < 63 characters. +#} + +{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %} + {% if dstring %} + {% set dt = modules.datetime.datetime.now() %} + {% set dtstring = dt.strftime("%H%M%S%f") %} + {% set suffix = suffix ~ dtstring %} + {% endif %} + {% set suffix_length = suffix|length %} + {% set relation_max_name_length = base_relation.relation_max_name_length() %} + {% if suffix_length > relation_max_name_length %} + {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %} + {% endif %} + {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %} + + {{ return(base_relation.incorporate(path={"identifier": identifier })) }} + + {% endmacro %} + +{% macro postgres__make_intermediate_relation(base_relation, suffix) %} + {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }} +{% endmacro %} + +{% macro postgres__make_temp_relation(base_relation, suffix) %} + {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %} + {{ return(temp_relation.incorporate(path={"schema": none, + "database": none})) }} +{% endmacro %} + +{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %} + {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %} + {{ return(backup_relation.incorporate(type=backup_relation_type)) }} +{% endmacro %} + +{# + By using dollar-quoting like this, users can embed anything they want into their comments + (including nested dollar-quoting), as long as they do not use this exact dollar-quoting + label. It would be nice to just pick a new one but eventually you do have to give up. +#} +{% macro postgres_escape_comment(comment) -%} + {% if comment is not string %} + {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %} + {% endif %} + {%- set magic = '$dbt_comment_literal_block$' -%} + {%- if magic in comment -%} + {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%} + {%- endif -%} + {{ magic }}{{ comment }}{{ magic }} +{%- endmacro %} + + +{% macro postgres__alter_relation_comment(relation, comment) %} + {% set escaped_comment = postgres_escape_comment(comment) %} + comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }}; +{% endmacro %} + + +{% macro postgres__alter_column_comment(relation, column_dict) %} + {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute="name") | list %} + {% for column_name in column_dict if (column_name in existing_columns) %} + {% set comment = column_dict[column_name]['description'] %} + {% set escaped_comment = postgres_escape_comment(comment) %} + comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }}; + {% endfor %} +{% endmacro %} + +{%- macro postgres__get_show_grant_sql(relation) -%} + select grantee, privilege_type + from {{ relation.information_schema('role_table_grants') }} + where grantor = current_role + and grantee != current_role + and table_schema = '{{ relation.schema }}' + and table_name = '{{ relation.identifier }}' +{%- endmacro -%} + +{% macro postgres__copy_grants() %} + {{ return(False) }} +{% endmacro %} + + +{% macro postgres__get_show_indexes_sql(relation) %} + select + i.relname as name, + m.amname as method, + ix.indisunique as "unique", + array_to_string(array_agg(a.attname), ',') as column_names + from pg_index ix + join pg_class i + on i.oid = ix.indexrelid + join pg_am m + on m.oid=i.relam + join pg_class t + on t.oid = ix.indrelid + join pg_namespace n + on n.oid = t.relnamespace + join pg_attribute a + on a.attrelid = t.oid + and a.attnum = ANY(ix.indkey) + where t.relname = '{{ relation.identifier }}' + and n.nspname = '{{ relation.schema }}' + and t.relkind in ('r', 'm') + group by 1, 2, 3 + order by 1, 2, 3 +{% endmacro %} + + +{%- macro postgres__get_drop_index_sql(relation, index_name) -%} + drop index if exists "{{ relation.schema }}"."{{ index_name }}" +{%- endmacro -%} diff --git a/dbt/include/postgres/macros/catalog.sql b/dbt/include/postgres/macros/catalog.sql new file mode 100644 index 000000000..bd587f0ac --- /dev/null +++ b/dbt/include/postgres/macros/catalog.sql @@ -0,0 +1,67 @@ + +{% macro postgres__get_catalog_relations(information_schema, relations) -%} + {%- call statement('catalog', fetch_result=True) -%} + + {# + If the user has multiple databases set and the first one is wrong, this will fail. + But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better. + #} + {% set database = information_schema.database %} + {{ adapter.verify_database(database) }} + + select + '{{ database }}' as table_database, + sch.nspname as table_schema, + tbl.relname as table_name, + case tbl.relkind + when 'v' then 'VIEW' + when 'm' then 'MATERIALIZED VIEW' + else 'BASE TABLE' + end as table_type, + tbl_desc.description as table_comment, + col.attname as column_name, + col.attnum as column_index, + pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type, + col_desc.description as column_comment, + pg_get_userbyid(tbl.relowner) as table_owner + + from pg_catalog.pg_namespace sch + join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid + join pg_catalog.pg_attribute col on col.attrelid = tbl.oid + left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0) + left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum) + where ( + {%- for relation in relations -%} + {%- if relation.identifier -%} + (upper(sch.nspname) = upper('{{ relation.schema }}') and + upper(tbl.relname) = upper('{{ relation.identifier }}')) + {%- else-%} + upper(sch.nspname) = upper('{{ relation.schema }}') + {%- endif -%} + {%- if not loop.last %} or {% endif -%} + {%- endfor -%} + ) + and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session + and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables + and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table + and col.attnum > 0 -- negative numbers are used for system columns such as oid + and not col.attisdropped -- column as not been dropped + + order by + sch.nspname, + tbl.relname, + col.attnum + + {%- endcall -%} + + {{ return(load_result('catalog').table) }} +{%- endmacro %} + + +{% macro postgres__get_catalog(information_schema, schemas) -%} + {%- set relations = [] -%} + {%- for schema in schemas -%} + {%- set dummy = relations.append({'schema': schema}) -%} + {%- endfor -%} + {{ return(postgres__get_catalog_relations(information_schema, relations)) }} +{%- endmacro %} diff --git a/dbt/include/postgres/macros/materializations/incremental_strategies.sql b/dbt/include/postgres/macros/materializations/incremental_strategies.sql new file mode 100644 index 000000000..f2fbf41e0 --- /dev/null +++ b/dbt/include/postgres/macros/materializations/incremental_strategies.sql @@ -0,0 +1,9 @@ +{% macro postgres__get_incremental_default_sql(arg_dict) %} + + {% if arg_dict["unique_key"] %} + {% do return(get_incremental_delete_insert_sql(arg_dict)) %} + {% else %} + {% do return(get_incremental_append_sql(arg_dict)) %} + {% endif %} + +{% endmacro %} diff --git a/dbt/include/postgres/macros/materializations/snapshot_merge.sql b/dbt/include/postgres/macros/materializations/snapshot_merge.sql new file mode 100644 index 000000000..807c70b6c --- /dev/null +++ b/dbt/include/postgres/macros/materializations/snapshot_merge.sql @@ -0,0 +1,18 @@ + +{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%} + {%- set insert_cols_csv = insert_cols | join(', ') -%} + + update {{ target }} + set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to + from {{ source }} as DBT_INTERNAL_SOURCE + where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text + and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text) + and {{ target }}.dbt_valid_to is null; + + insert into {{ target }} ({{ insert_cols_csv }}) + select {% for column in insert_cols -%} + DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %} + {%- endfor %} + from {{ source }} as DBT_INTERNAL_SOURCE + where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text; +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations.sql b/dbt/include/postgres/macros/relations.sql new file mode 100644 index 000000000..dd50cf001 --- /dev/null +++ b/dbt/include/postgres/macros/relations.sql @@ -0,0 +1,80 @@ +{% macro postgres__get_relations() -%} + + {# + -- in pg_depend, objid is the dependent, refobjid is the referenced object + -- > a pg_depend entry indicates that the referenced object cannot be + -- > dropped without also dropping the dependent object. + #} + + {%- call statement('relations', fetch_result=True) -%} + with relation as ( + select + pg_rewrite.ev_class as class, + pg_rewrite.oid as id + from pg_rewrite + ), + class as ( + select + oid as id, + relname as name, + relnamespace as schema, + relkind as kind + from pg_class + ), + dependency as ( + select distinct + pg_depend.objid as id, + pg_depend.refobjid as ref + from pg_depend + ), + schema as ( + select + pg_namespace.oid as id, + pg_namespace.nspname as name + from pg_namespace + where nspname != 'information_schema' and nspname not like 'pg\_%' + ), + referenced as ( + select + relation.id AS id, + referenced_class.name , + referenced_class.schema , + referenced_class.kind + from relation + join class as referenced_class on relation.class=referenced_class.id + where referenced_class.kind in ('r', 'v', 'm') + ), + relationships as ( + select + referenced.name as referenced_name, + referenced.schema as referenced_schema_id, + dependent_class.name as dependent_name, + dependent_class.schema as dependent_schema_id, + referenced.kind as kind + from referenced + join dependency on referenced.id=dependency.id + join class as dependent_class on dependency.ref=dependent_class.id + where + (referenced.name != dependent_class.name or + referenced.schema != dependent_class.schema) + ) + + select + referenced_schema.name as referenced_schema, + relationships.referenced_name as referenced_name, + dependent_schema.name as dependent_schema, + relationships.dependent_name as dependent_name + from relationships + join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id + join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id + group by referenced_schema, referenced_name, dependent_schema, dependent_name + order by referenced_schema, referenced_name, dependent_schema, dependent_name; + + {%- endcall -%} + + {{ return(load_result('relations').table) }} +{% endmacro %} + +{% macro postgres_get_relations() %} + {{ return(postgres__get_relations()) }} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/materialized_view/alter.sql b/dbt/include/postgres/macros/relations/materialized_view/alter.sql new file mode 100644 index 000000000..ee53c1136 --- /dev/null +++ b/dbt/include/postgres/macros/relations/materialized_view/alter.sql @@ -0,0 +1,50 @@ +{% macro postgres__get_alter_materialized_view_as_sql( + relation, + configuration_changes, + sql, + existing_relation, + backup_relation, + intermediate_relation +) %} + + -- apply a full refresh immediately if needed + {% if configuration_changes.requires_full_refresh %} + + {{ get_replace_sql(existing_relation, relation, sql) }} + + -- otherwise apply individual changes as needed + {% else %} + + {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }} + + {%- endif -%} + +{% endmacro %} + + +{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%} + {{- log("Applying UPDATE INDEXES to: " ~ relation) -}} + + {%- for _index_change in index_changes -%} + {%- set _index = _index_change.context -%} + + {%- if _index_change.action == "drop" -%} + + {{ postgres__get_drop_index_sql(relation, _index.name) }}; + + {%- elif _index_change.action == "create" -%} + + {{ postgres__get_create_index_sql(relation, _index.as_node_config) }} + + {%- endif -%} + + {%- endfor -%} + +{%- endmacro -%} + + +{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %} + {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %} + {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %} + {% do return(_configuration_changes) %} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/materialized_view/create.sql b/dbt/include/postgres/macros/relations/materialized_view/create.sql new file mode 100644 index 000000000..17e5cb064 --- /dev/null +++ b/dbt/include/postgres/macros/relations/materialized_view/create.sql @@ -0,0 +1,8 @@ +{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %} + create materialized view if not exists {{ relation }} as {{ sql }}; + + {% for _index_dict in config.get('indexes', []) -%} + {{- get_create_index_sql(relation, _index_dict) -}} + {%- endfor -%} + +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/materialized_view/describe.sql b/dbt/include/postgres/macros/relations/materialized_view/describe.sql new file mode 100644 index 000000000..cb133b6a8 --- /dev/null +++ b/dbt/include/postgres/macros/relations/materialized_view/describe.sql @@ -0,0 +1,5 @@ +{% macro postgres__describe_materialized_view(relation) %} + -- for now just get the indexes, we don't need the name or the query yet + {% set _indexes = run_query(get_show_indexes_sql(relation)) %} + {% do return({'indexes': _indexes}) %} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/materialized_view/drop.sql b/dbt/include/postgres/macros/relations/materialized_view/drop.sql new file mode 100644 index 000000000..2263bb652 --- /dev/null +++ b/dbt/include/postgres/macros/relations/materialized_view/drop.sql @@ -0,0 +1,3 @@ +{% macro postgres__drop_materialized_view(relation) -%} + drop materialized view if exists {{ relation }} cascade +{%- endmacro %} diff --git a/dbt/include/postgres/macros/relations/materialized_view/refresh.sql b/dbt/include/postgres/macros/relations/materialized_view/refresh.sql new file mode 100644 index 000000000..48b863e51 --- /dev/null +++ b/dbt/include/postgres/macros/relations/materialized_view/refresh.sql @@ -0,0 +1,3 @@ +{% macro postgres__refresh_materialized_view(relation) %} + refresh materialized view {{ relation }} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/materialized_view/rename.sql b/dbt/include/postgres/macros/relations/materialized_view/rename.sql new file mode 100644 index 000000000..293ec9d1e --- /dev/null +++ b/dbt/include/postgres/macros/relations/materialized_view/rename.sql @@ -0,0 +1,3 @@ +{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %} + alter materialized view {{ relation }} rename to {{ new_name }} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/table/drop.sql b/dbt/include/postgres/macros/relations/table/drop.sql new file mode 100644 index 000000000..146cfc827 --- /dev/null +++ b/dbt/include/postgres/macros/relations/table/drop.sql @@ -0,0 +1,3 @@ +{% macro postgres__drop_table(relation) -%} + drop table if exists {{ relation }} cascade +{%- endmacro %} diff --git a/dbt/include/postgres/macros/relations/table/rename.sql b/dbt/include/postgres/macros/relations/table/rename.sql new file mode 100644 index 000000000..bc3c234ab --- /dev/null +++ b/dbt/include/postgres/macros/relations/table/rename.sql @@ -0,0 +1,3 @@ +{% macro postgres__get_rename_table_sql(relation, new_name) %} + alter table {{ relation }} rename to {{ new_name }} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/table/replace.sql b/dbt/include/postgres/macros/relations/table/replace.sql new file mode 100644 index 000000000..3750edfdf --- /dev/null +++ b/dbt/include/postgres/macros/relations/table/replace.sql @@ -0,0 +1,17 @@ +{% macro postgres__get_replace_table_sql(relation, sql) -%} + + {%- set sql_header = config.get('sql_header', none) -%} + {{ sql_header if sql_header is not none }} + + create or replace table {{ relation }} + {% set contract_config = config.get('contract') %} + {% if contract_config.enforced %} + {{ get_assert_columns_equivalent(sql) }} + {{ get_table_columns_and_constraints() }} + {%- set sql = get_select_subquery(sql) %} + {% endif %} + as ( + {{ sql }} + ); + +{%- endmacro %} diff --git a/dbt/include/postgres/macros/relations/view/drop.sql b/dbt/include/postgres/macros/relations/view/drop.sql new file mode 100644 index 000000000..46bd5a063 --- /dev/null +++ b/dbt/include/postgres/macros/relations/view/drop.sql @@ -0,0 +1,3 @@ +{% macro postgres__drop_view(relation) -%} + drop view if exists {{ relation }} cascade +{%- endmacro %} diff --git a/dbt/include/postgres/macros/relations/view/rename.sql b/dbt/include/postgres/macros/relations/view/rename.sql new file mode 100644 index 000000000..3c890a5b2 --- /dev/null +++ b/dbt/include/postgres/macros/relations/view/rename.sql @@ -0,0 +1,3 @@ +{% macro postgres__get_rename_view_sql(relation, new_name) %} + alter view {{ relation }} rename to {{ new_name }} +{% endmacro %} diff --git a/dbt/include/postgres/macros/relations/view/replace.sql b/dbt/include/postgres/macros/relations/view/replace.sql new file mode 100644 index 000000000..e2724c37e --- /dev/null +++ b/dbt/include/postgres/macros/relations/view/replace.sql @@ -0,0 +1,15 @@ +{% macro postgres__get_replace_view_sql(relation, sql) -%} + + {%- set sql_header = config.get('sql_header', none) -%} + {{ sql_header if sql_header is not none }} + + create or replace view {{ relation }} + {% set contract_config = config.get('contract') %} + {% if contract_config.enforced %} + {{ get_assert_columns_equivalent(sql) }} + {%- endif %} + as ( + {{ sql }} + ); + +{%- endmacro %} diff --git a/dbt/include/postgres/macros/timestamps.sql b/dbt/include/postgres/macros/timestamps.sql new file mode 100644 index 000000000..7233571b6 --- /dev/null +++ b/dbt/include/postgres/macros/timestamps.sql @@ -0,0 +1,20 @@ +{% macro postgres__current_timestamp() -%} + now() +{%- endmacro %} + +{% macro postgres__snapshot_string_as_time(timestamp) -%} + {%- set result = "'" ~ timestamp ~ "'::timestamp without time zone" -%} + {{ return(result) }} +{%- endmacro %} + +{% macro postgres__snapshot_get_time() -%} + {{ current_timestamp() }}::timestamp without time zone +{%- endmacro %} + +{% macro postgres__current_timestamp_backcompat() %} + current_timestamp::{{ type_timestamp() }} +{% endmacro %} + +{% macro postgres__current_timestamp_in_utc_backcompat() %} + (current_timestamp at time zone 'utc')::{{ type_timestamp() }} +{% endmacro %} diff --git a/dbt/include/postgres/macros/utils/any_value.sql b/dbt/include/postgres/macros/utils/any_value.sql new file mode 100644 index 000000000..6fcb4eebe --- /dev/null +++ b/dbt/include/postgres/macros/utils/any_value.sql @@ -0,0 +1,7 @@ +{#- /*Postgres doesn't support any_value, so we're using min() to get the same result*/ -#} + +{% macro postgres__any_value(expression) -%} + + min({{ expression }}) + +{%- endmacro %} diff --git a/dbt/include/postgres/macros/utils/columns_spec_ddl.sql b/dbt/include/postgres/macros/utils/columns_spec_ddl.sql new file mode 100644 index 000000000..e69de29bb diff --git a/dbt/include/postgres/macros/utils/dateadd.sql b/dbt/include/postgres/macros/utils/dateadd.sql new file mode 100644 index 000000000..97009ccdd --- /dev/null +++ b/dbt/include/postgres/macros/utils/dateadd.sql @@ -0,0 +1,5 @@ +{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %} + + {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }})) + +{% endmacro %} diff --git a/dbt/include/postgres/macros/utils/datediff.sql b/dbt/include/postgres/macros/utils/datediff.sql new file mode 100644 index 000000000..b452529be --- /dev/null +++ b/dbt/include/postgres/macros/utils/datediff.sql @@ -0,0 +1,32 @@ +{% macro postgres__datediff(first_date, second_date, datepart) -%} + + {% if datepart == 'year' %} + (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date)) + {% elif datepart == 'quarter' %} + ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date)) + {% elif datepart == 'month' %} + ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date)) + {% elif datepart == 'day' %} + (({{second_date}})::date - ({{first_date}})::date) + {% elif datepart == 'week' %} + ({{ datediff(first_date, second_date, 'day') }} / 7 + case + when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then + case when {{first_date}} <= {{second_date}} then 0 else -1 end + else + case when {{first_date}} <= {{second_date}} then 1 else 0 end + end) + {% elif datepart == 'hour' %} + ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp)) + {% elif datepart == 'minute' %} + ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp)) + {% elif datepart == 'second' %} + ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp))) + {% elif datepart == 'millisecond' %} + ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp))) + {% elif datepart == 'microsecond' %} + ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp))) + {% else %} + {{ exceptions.raise_compiler_error("Unsupported datepart for macro datediff in postgres: {!r}".format(datepart)) }} + {% endif %} + +{%- endmacro %} diff --git a/dbt/include/postgres/macros/utils/last_day.sql b/dbt/include/postgres/macros/utils/last_day.sql new file mode 100644 index 000000000..16995301c --- /dev/null +++ b/dbt/include/postgres/macros/utils/last_day.sql @@ -0,0 +1,14 @@ +{% macro postgres__last_day(date, datepart) -%} + + {%- if datepart == 'quarter' -%} + -- postgres dateadd does not support quarter interval. + cast( + {{dbt.dateadd('day', '-1', + dbt.dateadd('month', '3', dbt.date_trunc(datepart, date)) + )}} + as date) + {%- else -%} + {{dbt.default_last_day(date, datepart)}} + {%- endif -%} + +{%- endmacro %} diff --git a/dbt/include/postgres/macros/utils/listagg.sql b/dbt/include/postgres/macros/utils/listagg.sql new file mode 100644 index 000000000..f3e19427d --- /dev/null +++ b/dbt/include/postgres/macros/utils/listagg.sql @@ -0,0 +1,23 @@ +{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} + + {% if limit_num -%} + array_to_string( + (array_agg( + {{ measure }} + {% if order_by_clause -%} + {{ order_by_clause }} + {%- endif %} + ))[1:{{ limit_num }}], + {{ delimiter_text }} + ) + {%- else %} + string_agg( + {{ measure }}, + {{ delimiter_text }} + {% if order_by_clause -%} + {{ order_by_clause }} + {%- endif %} + ) + {%- endif %} + +{%- endmacro %} diff --git a/dbt/include/postgres/macros/utils/split_part.sql b/dbt/include/postgres/macros/utils/split_part.sql new file mode 100644 index 000000000..e4174d2ee --- /dev/null +++ b/dbt/include/postgres/macros/utils/split_part.sql @@ -0,0 +1,9 @@ +{% macro postgres__split_part(string_text, delimiter_text, part_number) %} + + {% if part_number >= 0 %} + {{ dbt.default__split_part(string_text, delimiter_text, part_number) }} + {% else %} + {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }} + {% endif %} + +{% endmacro %} diff --git a/dbt/include/postgres/profile_template.yml b/dbt/include/postgres/profile_template.yml new file mode 100644 index 000000000..5060a272f --- /dev/null +++ b/dbt/include/postgres/profile_template.yml @@ -0,0 +1,21 @@ +fixed: + type: postgres +prompts: + host: + hint: 'hostname for the instance' + port: + default: 5432 + type: 'int' + user: + hint: 'dev username' + pass: + hint: 'dev password' + hide_input: true + dbname: + hint: 'default database that dbt will build objects in' + schema: + hint: 'default schema that dbt will build objects in' + threads: + hint: '1 or more' + type: 'int' + default: 1 diff --git a/dbt/include/postgres/sample_profiles.yml b/dbt/include/postgres/sample_profiles.yml new file mode 100644 index 000000000..567f39128 --- /dev/null +++ b/dbt/include/postgres/sample_profiles.yml @@ -0,0 +1,24 @@ +default: + outputs: + + dev: + type: postgres + threads: [1 or more] + host: [host] + port: [port] + user: [dev_username] + pass: [dev_password] + dbname: [dbname] + schema: [dev_schema] + + prod: + type: postgres + threads: [1 or more] + host: [host] + port: [port] + user: [prod_username] + pass: [prod_password] + dbname: [dbname] + schema: [prod_schema] + + target: dev diff --git a/pyproject.toml b/pyproject.toml index ad9a25f93..2ea8e1ee6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,7 @@ maintainers = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] classifiers = [ - "Development Status :: 2 - Pre-Alpha", + "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", @@ -25,6 +25,9 @@ classifiers = [ dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + "psycopg2-binary~=2.9", + # installed via dbt-common but used directly, unpin minor to avoid version conflicts + "agate<2.0", ] [project.optional-dependencies] lint = [ @@ -53,25 +56,17 @@ requires = ["hatchling"] build-backend = "hatchling.build" [tool.hatch.build.targets.sdist] -exclude = [ - "/.github", - "/.changes", - ".changie.yaml", - ".gitignore", - ".pre-commit-config.yaml", - "CONTRIBUTING.md", - "/tests", -] +include = ["dbt/*"] [tool.hatch.build.targets.wheel] -packages = ["dbt.postgres"] +packages = ["dbt"] [tool.hatch.metadata] -# needed for installing `dbt-common` directly from github +# needed for installing `dbt-adapters` and `dbt-common` directly from github allow-direct-references = true [tool.hatch.version] -path = "dbt/postgres/__about__.py" +path = "dbt/adapters/postgres/__about__.py" [tool.hatch.envs.default] features = ["lint", "test"] @@ -87,18 +82,13 @@ lint-flake8 = "python -m flake8 ." lint-mypy = "python -m mypy ." [tool.black] -extend-exclude = "dbt/postgres/events/adapter_types_pb2.py" line-length = 99 target-version = ['py38'] [tool.flake8] select = ["E", "W", "F"] ignore = ["E203", "E501", "E741", "W503", "W504"] -exclude = [ - "dbt/postgres/events/adapter_types_pb2.py", - "tests", - "venv", -] +exclude = ["tests", "venv"] per-file-ignores = ["*/__init__.py: F401"] [tool.mypy] @@ -107,18 +97,11 @@ show_error_codes = true explicit_package_bases = true ignore_missing_imports = true pretty = true -mypy_path = "third-party-stubs/" files = [ - "dbt/postgres", + "dbt/adapters/postgres", "tests/unit", ] -exclude = [ - "dbt/postgres/events/adapter_types_pb2.py", - "venv", -] -[[tool.mypy.overrides]] -module = ["dbt.postgres.events.adapter_types_pb2"] -follow_imports = "skip" +exclude = ["venv"] [tool.pytest] env_files = ["test.env"] From c02410fd33e10679efff2495ec68e9eac71c1404 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 16 Jan 2024 19:44:42 -0500 Subject: [PATCH 004/114] fix dbt logo --- README.md | 2 +- etc/dbt-logo.svg | 12 ++++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 etc/dbt-logo.svg diff --git a/README.md b/README.md index 8c62c4eea..ac9e65bd7 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ <p align="center"> - <img src="https://raw.githubusercontent.com/dbt-labs/dbt-postgres/ec7dee39f793aa4f7dd3dae37282cc87664813e4/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/> + <img src="https://raw.githubusercontent.com/dbt-labs/dbt-postgres/main/etc/dbt-logo.svg" alt="dbt logo" width="500"/> </p> <p align="center"> <a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml"> diff --git a/etc/dbt-logo.svg b/etc/dbt-logo.svg new file mode 100644 index 000000000..f94f111c7 --- /dev/null +++ b/etc/dbt-logo.svg @@ -0,0 +1,12 @@ +<svg width="404" height="90" viewBox="0 0 404 90" fill="none" xmlns="http://www.w3.org/2000/svg"> +<path d="M256 48.2601C256 30.9001 266.23 20.3201 279.89 20.3201C282.691 20.3363 285.458 20.9391 288.012 22.0896C290.567 23.2401 292.852 24.9128 294.72 27.0001L291 31.4101C289.634 29.8296 287.946 28.5595 286.048 27.685C284.151 26.8106 282.089 26.352 280 26.3401C269.83 26.3401 263.07 34.6701 263.07 48.0701C263.07 61.4701 269.52 70.0701 279.73 70.0701C284.87 70.0701 288.73 67.8401 292.2 64.0701L296 68.3301C291.74 73.2701 286.48 76.1001 279.48 76.1001C266.08 76.1001 256 65.7301 256 48.2601Z" fill="#262A38"/> +<path d="M299.6 55.24C299.6 41.94 308.3 34.24 318.07 34.24C327.84 34.24 336.54 41.91 336.54 55.24C336.54 68.57 327.84 76.1 318.07 76.1C308.3 76.1 299.6 68.44 299.6 55.24ZM329.6 55.24C329.6 46.11 324.96 39.89 318.1 39.89C311.24 39.89 306.6 46.11 306.6 55.24C306.6 64.37 311.25 70.49 318.1 70.49C324.95 70.49 329.57 64.37 329.57 55.24H329.6Z" fill="#262A38"/> +<path d="M344.22 35.2599H350.22L350.32 42.5199C353.1 37.4599 357.39 34.2699 361.88 34.2699C363.341 34.2059 364.797 34.4868 366.13 35.0899L364.91 40.9999C363.672 40.5577 362.365 40.3377 361.05 40.3499C357.68 40.3499 353.66 42.7699 350.96 49.5099V75.1099H344.22V35.2599Z" fill="#262A38"/> +<path d="M366.1 55.24C366.1 42.24 374.85 34.24 384.02 34.24C394.19 34.24 399.93 41.54 399.93 52.95C399.941 54.2338 399.834 55.5159 399.61 56.78H372.77C373.25 65.22 378.41 70.7 386.01 70.7C389.565 70.6737 393.026 69.5606 395.93 67.51L398.35 72C394.475 74.6625 389.891 76.1012 385.19 76.13C374.56 76.1 366.1 68.33 366.1 55.24ZM394 52C394 44 390.4 39.67 384.17 39.67C378.57 39.67 373.56 44.17 372.74 52H394Z" fill="#262A38"/> +<path d="M154.15 13.9199V75.3699H141.7V69.0999C140.41 71.2981 138.514 73.0789 136.24 74.2299C133.753 75.4616 131.015 76.1016 128.24 76.0999C124.792 76.1849 121.403 75.1883 118.55 73.2499C115.644 71.3173 113.334 68.6127 111.88 65.4399C110.251 61.8077 109.445 57.8601 109.52 53.8799C109.436 49.9247 110.242 46.0012 111.88 42.3999C113.335 39.2945 115.614 36.6482 118.47 34.7499C121.389 32.9073 124.79 31.9744 128.24 32.0699C130.932 32.0391 133.593 32.6529 136 33.8599C138.25 34.9823 140.138 36.715 141.45 38.8599V18.0699L154.15 13.9199ZM139.15 63.2399C140.78 61.1199 141.68 58.0299 141.68 54.0399C141.68 50.0499 140.86 47.0399 139.15 44.9299C138.249 43.9126 137.145 43.0966 135.907 42.5349C134.67 41.9732 133.329 41.6785 131.97 41.67C130.611 41.6615 129.266 41.9394 128.022 42.4856C126.778 43.0318 125.663 43.834 124.75 44.8399C123.04 46.9599 122.14 49.9699 122.14 53.8399C122.14 57.7099 123.04 60.9199 124.75 63.1199C125.611 64.2087 126.716 65.0807 127.975 65.6659C129.234 66.2511 130.612 66.5333 132 66.4899C133.367 66.5565 134.731 66.2964 135.978 65.7312C137.225 65.1661 138.319 64.3121 139.17 63.2399H139.15Z" fill="#262A38"/> +<path d="M225 42.48V58.92C224.88 59.8214 224.966 60.7383 225.252 61.6015C225.538 62.4647 226.016 63.2517 226.651 63.9028C227.286 64.554 228.06 65.0523 228.916 65.3602C229.771 65.6681 230.686 65.7774 231.59 65.68C232.991 65.658 234.385 65.4666 235.74 65.11V74.88C233.427 75.6856 230.988 76.0651 228.54 76C223.333 76 219.333 74.5333 216.54 71.6C213.747 68.6667 212.363 64.5167 212.39 59.15V23.11L225 19V33H235.74V42.44L225 42.48Z" fill="#262A38"/> +<path d="M86.19 3.74C88.3187 5.76368 89.6674 8.47181 90 11.39C89.9702 12.6575 89.6266 13.8978 89 15C88.26 16.54 79.23 32.17 76.54 36.48C75.0003 39.0373 74.1846 41.965 74.18 44.95C74.1737 47.9334 74.9903 50.8607 76.54 53.41C79.19 57.7 88.22 73.41 89 75C89.6147 76.0527 89.9584 77.2416 90 78.46C89.6193 81.3242 88.3066 83.9836 86.2643 86.0277C84.2221 88.0717 81.5639 89.3868 78.7 89.77C77.4606 89.7297 76.2535 89.3642 75.2 88.71C73.74 88 57.79 79.19 53.47 76.5C53.15 76.34 52.82 76.1 52.41 75.93L31.09 63.32C31.5455 67.4143 33.3833 71.2309 36.3 74.14C36.859 74.6987 37.4572 75.2167 38.09 75.69C37.5574 75.9283 37.0398 76.1988 36.54 76.5C32.23 79.19 16.52 88.22 15 89C13.9018 89.6362 12.659 89.9805 11.39 90C8.50046 89.7092 5.81442 88.3804 3.83 86.26C1.69884 84.2398 0.343537 81.5363 0 78.62C0.0647854 77.3847 0.428539 76.1837 1.06 75.12C1.79 73.57 10.82 57.87 13.51 53.55C15.0641 51.0026 15.881 48.074 15.87 45.09C15.8797 42.103 15.0629 39.1716 13.51 36.62C10.82 32.15 1.71 16.44 1.06 14.89C0.425939 13.8275 0.061986 12.6258 0 11.39C0.314569 8.48135 1.63782 5.7747 3.74 3.74C5.7747 1.63782 8.48135 0.314569 11.39 0C12.6588 0.0712787 13.8941 0.433998 15 1.06C16.3 1.63 27.77 8.3 33.88 11.88L35.26 12.7C35.6533 12.9649 36.0606 13.2086 36.48 13.43L37.13 13.84L58.84 26.7C58.5909 24.275 57.8495 21.9262 56.6614 19.7976C55.4733 17.6689 53.8633 15.805 51.93 14.32C52.4603 14.0835 52.9747 13.8129 53.47 13.51C57.79 10.82 73.47 1.71 75.04 1.06C76.1278 0.417179 77.3576 0.0530581 78.62 0C81.5003 0.332523 84.1758 1.65441 86.19 3.74ZM46.19 50.79L50.83 46.15C50.9837 46.0025 51.1059 45.8255 51.1894 45.6296C51.273 45.4337 51.316 45.223 51.316 45.01C51.316 44.797 51.273 44.5863 51.1894 44.3904C51.1059 44.1945 50.9837 44.0175 50.83 43.87L46.19 39.23C46.0425 39.0763 45.8655 38.9541 45.6696 38.8706C45.4737 38.787 45.263 38.744 45.05 38.744C44.837 38.744 44.6263 38.787 44.4304 38.8706C44.2345 38.9541 44.0575 39.0763 43.91 39.23L39.27 43.87C39.1163 44.0175 38.9941 44.1945 38.9106 44.3904C38.827 44.5863 38.784 44.797 38.784 45.01C38.784 45.223 38.827 45.4337 38.9106 45.6296C38.9941 45.8255 39.1163 46.0025 39.27 46.15L43.91 50.79C44.2227 51.0556 44.6197 51.2015 45.03 51.2015C45.4403 51.2015 45.8373 51.0556 46.15 50.79H46.19Z" fill="#FF694A"/> +<path d="M205.26 42.32C203.857 39.1815 201.566 36.5221 198.67 34.67C195.734 32.8669 192.345 31.9405 188.9 32C186.15 31.9157 183.426 32.5536 181 33.85C178.734 34.9861 176.866 36.7815 175.64 39V19.05H163V75.28H175.54V68.93C176.771 71.1426 178.643 72.931 180.91 74.06C183.368 75.3691 186.126 76.0136 188.91 75.93C192.346 76.0074 195.724 75.0416 198.6 73.1601C201.508 71.2299 203.819 68.5246 205.27 65.35C206.908 61.7206 207.714 57.7711 207.63 53.7901C207.71 49.8374 206.9 45.9174 205.26 42.32ZM192.48 63.1501C191.581 64.1744 190.474 64.9952 189.232 65.5574C187.99 66.1197 186.643 66.4105 185.28 66.4105C183.917 66.4105 182.57 66.1197 181.328 65.5574C180.086 64.9952 178.979 64.1744 178.08 63.1501C176.37 61 175.56 58 175.56 54C175.56 50 176.37 47.0801 178.08 44.8801C178.982 43.8648 180.088 43.0507 181.325 42.4908C182.563 41.9309 183.904 41.6376 185.262 41.6301C186.62 41.6225 187.965 41.9009 189.208 42.447C190.452 42.9932 191.567 43.7949 192.48 44.8C194.19 46.92 195.01 49.93 195.01 53.8C195 57.8601 194.19 61 192.48 63.1501Z" fill="#262A38"/> +<path d="M175.64 13.9099V19.0499H163.02V18.0699L175.64 13.9099Z" fill="#262A38"/> +<path d="M401.31 75.89C402.4 75.89 403.29 75.05 403.29 73.87C403.29 72.69 402.4 71.85 401.31 71.85C400.21 71.85 399.32 72.69 399.32 73.87C399.32 75.05 400.21 75.89 401.31 75.89ZM401.31 75.4C400.47 75.4 399.87 74.8 399.87 73.87C399.87 72.94 400.47 72.34 401.31 72.34C402.15 72.34 402.74 72.94 402.74 73.87C402.74 74.8 402.15 75.4 401.31 75.4ZM400.5 74.84H401.07V74.24H401.36L401.66 74.84H402.26L401.85 74.07C402.08 73.96 402.2 73.76 402.2 73.53C402.2 73.08 401.85 72.87 401.39 72.87H400.5V74.84ZM401.07 73.84V73.3H401.28C401.48 73.3 401.58 73.42 401.58 73.56C401.58 73.71 401.48 73.84 401.28 73.84H401.07Z" fill="#2D3142"/> +</svg> From 6b4ac7199363cd4bcb47e4a6fed34fa4d44e4eba Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 16 Jan 2024 19:46:08 -0500 Subject: [PATCH 005/114] fix dbt logo --- README.md | 2 +- etc/dbt-logo.svg | 12 ------------ 2 files changed, 1 insertion(+), 13 deletions(-) delete mode 100644 etc/dbt-logo.svg diff --git a/README.md b/README.md index ac9e65bd7..d5b8900ae 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ <p align="center"> - <img src="https://raw.githubusercontent.com/dbt-labs/dbt-postgres/main/etc/dbt-logo.svg" alt="dbt logo" width="500"/> + <img src="https://raw.githubusercontent.com/dbt-labs/dbt/ec7dee39f793aa4f7dd3dae37282cc87664813e4/etc/dbt-logo-full.svg" alt="dbt logo" width="500"/> </p> <p align="center"> <a href="https://github.com/dbt-labs/dbt-core/actions/workflows/main.yml"> diff --git a/etc/dbt-logo.svg b/etc/dbt-logo.svg deleted file mode 100644 index f94f111c7..000000000 --- a/etc/dbt-logo.svg +++ /dev/null @@ -1,12 +0,0 @@ -<svg width="404" height="90" viewBox="0 0 404 90" fill="none" xmlns="http://www.w3.org/2000/svg"> -<path d="M256 48.2601C256 30.9001 266.23 20.3201 279.89 20.3201C282.691 20.3363 285.458 20.9391 288.012 22.0896C290.567 23.2401 292.852 24.9128 294.72 27.0001L291 31.4101C289.634 29.8296 287.946 28.5595 286.048 27.685C284.151 26.8106 282.089 26.352 280 26.3401C269.83 26.3401 263.07 34.6701 263.07 48.0701C263.07 61.4701 269.52 70.0701 279.73 70.0701C284.87 70.0701 288.73 67.8401 292.2 64.0701L296 68.3301C291.74 73.2701 286.48 76.1001 279.48 76.1001C266.08 76.1001 256 65.7301 256 48.2601Z" fill="#262A38"/> -<path d="M299.6 55.24C299.6 41.94 308.3 34.24 318.07 34.24C327.84 34.24 336.54 41.91 336.54 55.24C336.54 68.57 327.84 76.1 318.07 76.1C308.3 76.1 299.6 68.44 299.6 55.24ZM329.6 55.24C329.6 46.11 324.96 39.89 318.1 39.89C311.24 39.89 306.6 46.11 306.6 55.24C306.6 64.37 311.25 70.49 318.1 70.49C324.95 70.49 329.57 64.37 329.57 55.24H329.6Z" fill="#262A38"/> -<path d="M344.22 35.2599H350.22L350.32 42.5199C353.1 37.4599 357.39 34.2699 361.88 34.2699C363.341 34.2059 364.797 34.4868 366.13 35.0899L364.91 40.9999C363.672 40.5577 362.365 40.3377 361.05 40.3499C357.68 40.3499 353.66 42.7699 350.96 49.5099V75.1099H344.22V35.2599Z" fill="#262A38"/> -<path d="M366.1 55.24C366.1 42.24 374.85 34.24 384.02 34.24C394.19 34.24 399.93 41.54 399.93 52.95C399.941 54.2338 399.834 55.5159 399.61 56.78H372.77C373.25 65.22 378.41 70.7 386.01 70.7C389.565 70.6737 393.026 69.5606 395.93 67.51L398.35 72C394.475 74.6625 389.891 76.1012 385.19 76.13C374.56 76.1 366.1 68.33 366.1 55.24ZM394 52C394 44 390.4 39.67 384.17 39.67C378.57 39.67 373.56 44.17 372.74 52H394Z" fill="#262A38"/> -<path d="M154.15 13.9199V75.3699H141.7V69.0999C140.41 71.2981 138.514 73.0789 136.24 74.2299C133.753 75.4616 131.015 76.1016 128.24 76.0999C124.792 76.1849 121.403 75.1883 118.55 73.2499C115.644 71.3173 113.334 68.6127 111.88 65.4399C110.251 61.8077 109.445 57.8601 109.52 53.8799C109.436 49.9247 110.242 46.0012 111.88 42.3999C113.335 39.2945 115.614 36.6482 118.47 34.7499C121.389 32.9073 124.79 31.9744 128.24 32.0699C130.932 32.0391 133.593 32.6529 136 33.8599C138.25 34.9823 140.138 36.715 141.45 38.8599V18.0699L154.15 13.9199ZM139.15 63.2399C140.78 61.1199 141.68 58.0299 141.68 54.0399C141.68 50.0499 140.86 47.0399 139.15 44.9299C138.249 43.9126 137.145 43.0966 135.907 42.5349C134.67 41.9732 133.329 41.6785 131.97 41.67C130.611 41.6615 129.266 41.9394 128.022 42.4856C126.778 43.0318 125.663 43.834 124.75 44.8399C123.04 46.9599 122.14 49.9699 122.14 53.8399C122.14 57.7099 123.04 60.9199 124.75 63.1199C125.611 64.2087 126.716 65.0807 127.975 65.6659C129.234 66.2511 130.612 66.5333 132 66.4899C133.367 66.5565 134.731 66.2964 135.978 65.7312C137.225 65.1661 138.319 64.3121 139.17 63.2399H139.15Z" fill="#262A38"/> -<path d="M225 42.48V58.92C224.88 59.8214 224.966 60.7383 225.252 61.6015C225.538 62.4647 226.016 63.2517 226.651 63.9028C227.286 64.554 228.06 65.0523 228.916 65.3602C229.771 65.6681 230.686 65.7774 231.59 65.68C232.991 65.658 234.385 65.4666 235.74 65.11V74.88C233.427 75.6856 230.988 76.0651 228.54 76C223.333 76 219.333 74.5333 216.54 71.6C213.747 68.6667 212.363 64.5167 212.39 59.15V23.11L225 19V33H235.74V42.44L225 42.48Z" fill="#262A38"/> -<path d="M86.19 3.74C88.3187 5.76368 89.6674 8.47181 90 11.39C89.9702 12.6575 89.6266 13.8978 89 15C88.26 16.54 79.23 32.17 76.54 36.48C75.0003 39.0373 74.1846 41.965 74.18 44.95C74.1737 47.9334 74.9903 50.8607 76.54 53.41C79.19 57.7 88.22 73.41 89 75C89.6147 76.0527 89.9584 77.2416 90 78.46C89.6193 81.3242 88.3066 83.9836 86.2643 86.0277C84.2221 88.0717 81.5639 89.3868 78.7 89.77C77.4606 89.7297 76.2535 89.3642 75.2 88.71C73.74 88 57.79 79.19 53.47 76.5C53.15 76.34 52.82 76.1 52.41 75.93L31.09 63.32C31.5455 67.4143 33.3833 71.2309 36.3 74.14C36.859 74.6987 37.4572 75.2167 38.09 75.69C37.5574 75.9283 37.0398 76.1988 36.54 76.5C32.23 79.19 16.52 88.22 15 89C13.9018 89.6362 12.659 89.9805 11.39 90C8.50046 89.7092 5.81442 88.3804 3.83 86.26C1.69884 84.2398 0.343537 81.5363 0 78.62C0.0647854 77.3847 0.428539 76.1837 1.06 75.12C1.79 73.57 10.82 57.87 13.51 53.55C15.0641 51.0026 15.881 48.074 15.87 45.09C15.8797 42.103 15.0629 39.1716 13.51 36.62C10.82 32.15 1.71 16.44 1.06 14.89C0.425939 13.8275 0.061986 12.6258 0 11.39C0.314569 8.48135 1.63782 5.7747 3.74 3.74C5.7747 1.63782 8.48135 0.314569 11.39 0C12.6588 0.0712787 13.8941 0.433998 15 1.06C16.3 1.63 27.77 8.3 33.88 11.88L35.26 12.7C35.6533 12.9649 36.0606 13.2086 36.48 13.43L37.13 13.84L58.84 26.7C58.5909 24.275 57.8495 21.9262 56.6614 19.7976C55.4733 17.6689 53.8633 15.805 51.93 14.32C52.4603 14.0835 52.9747 13.8129 53.47 13.51C57.79 10.82 73.47 1.71 75.04 1.06C76.1278 0.417179 77.3576 0.0530581 78.62 0C81.5003 0.332523 84.1758 1.65441 86.19 3.74ZM46.19 50.79L50.83 46.15C50.9837 46.0025 51.1059 45.8255 51.1894 45.6296C51.273 45.4337 51.316 45.223 51.316 45.01C51.316 44.797 51.273 44.5863 51.1894 44.3904C51.1059 44.1945 50.9837 44.0175 50.83 43.87L46.19 39.23C46.0425 39.0763 45.8655 38.9541 45.6696 38.8706C45.4737 38.787 45.263 38.744 45.05 38.744C44.837 38.744 44.6263 38.787 44.4304 38.8706C44.2345 38.9541 44.0575 39.0763 43.91 39.23L39.27 43.87C39.1163 44.0175 38.9941 44.1945 38.9106 44.3904C38.827 44.5863 38.784 44.797 38.784 45.01C38.784 45.223 38.827 45.4337 38.9106 45.6296C38.9941 45.8255 39.1163 46.0025 39.27 46.15L43.91 50.79C44.2227 51.0556 44.6197 51.2015 45.03 51.2015C45.4403 51.2015 45.8373 51.0556 46.15 50.79H46.19Z" fill="#FF694A"/> -<path d="M205.26 42.32C203.857 39.1815 201.566 36.5221 198.67 34.67C195.734 32.8669 192.345 31.9405 188.9 32C186.15 31.9157 183.426 32.5536 181 33.85C178.734 34.9861 176.866 36.7815 175.64 39V19.05H163V75.28H175.54V68.93C176.771 71.1426 178.643 72.931 180.91 74.06C183.368 75.3691 186.126 76.0136 188.91 75.93C192.346 76.0074 195.724 75.0416 198.6 73.1601C201.508 71.2299 203.819 68.5246 205.27 65.35C206.908 61.7206 207.714 57.7711 207.63 53.7901C207.71 49.8374 206.9 45.9174 205.26 42.32ZM192.48 63.1501C191.581 64.1744 190.474 64.9952 189.232 65.5574C187.99 66.1197 186.643 66.4105 185.28 66.4105C183.917 66.4105 182.57 66.1197 181.328 65.5574C180.086 64.9952 178.979 64.1744 178.08 63.1501C176.37 61 175.56 58 175.56 54C175.56 50 176.37 47.0801 178.08 44.8801C178.982 43.8648 180.088 43.0507 181.325 42.4908C182.563 41.9309 183.904 41.6376 185.262 41.6301C186.62 41.6225 187.965 41.9009 189.208 42.447C190.452 42.9932 191.567 43.7949 192.48 44.8C194.19 46.92 195.01 49.93 195.01 53.8C195 57.8601 194.19 61 192.48 63.1501Z" fill="#262A38"/> -<path d="M175.64 13.9099V19.0499H163.02V18.0699L175.64 13.9099Z" fill="#262A38"/> -<path d="M401.31 75.89C402.4 75.89 403.29 75.05 403.29 73.87C403.29 72.69 402.4 71.85 401.31 71.85C400.21 71.85 399.32 72.69 399.32 73.87C399.32 75.05 400.21 75.89 401.31 75.89ZM401.31 75.4C400.47 75.4 399.87 74.8 399.87 73.87C399.87 72.94 400.47 72.34 401.31 72.34C402.15 72.34 402.74 72.94 402.74 73.87C402.74 74.8 402.15 75.4 401.31 75.4ZM400.5 74.84H401.07V74.24H401.36L401.66 74.84H402.26L401.85 74.07C402.08 73.96 402.2 73.76 402.2 73.53C402.2 73.08 401.85 72.87 401.39 72.87H400.5V74.84ZM401.07 73.84V73.3H401.28C401.48 73.3 401.58 73.42 401.58 73.56C401.58 73.71 401.48 73.84 401.28 73.84H401.07Z" fill="#2D3142"/> -</svg> From 9a4ec9bca01f61370826ae82fff3806bfb470439 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 17 Jan 2024 15:31:16 -0500 Subject: [PATCH 006/114] point to dbt-common on pypi --- pyproject.toml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2ea8e1ee6..a43e2a11f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,9 +24,9 @@ classifiers = [ ] dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", - "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", "psycopg2-binary~=2.9", - # installed via dbt-common but used directly, unpin minor to avoid version conflicts + # installed via dbt-adapter but used directly, unpin minor to avoid version conflicts + "dbt-common<1.0", "agate<2.0", ] [project.optional-dependencies] @@ -56,13 +56,13 @@ requires = ["hatchling"] build-backend = "hatchling.build" [tool.hatch.build.targets.sdist] -include = ["dbt/*"] +include = ["dbt"] [tool.hatch.build.targets.wheel] packages = ["dbt"] [tool.hatch.metadata] -# needed for installing `dbt-adapters` and `dbt-common` directly from github +# needed for installing `dbt-adapters` directly from github allow-direct-references = true [tool.hatch.version] From 0264d9fb9ab87787e6a7dfb923befe4fe6fded89 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 17 Jan 2024 17:33:13 -0500 Subject: [PATCH 007/114] point to dbt-adapters on pypi --- pyproject.toml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a43e2a11f..fc0e2585a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,9 +23,9 @@ classifiers = [ "Programming Language :: Python :: 3.11", ] dependencies = [ - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt-adapters<1.0", "psycopg2-binary~=2.9", - # installed via dbt-adapter but used directly, unpin minor to avoid version conflicts + # installed via dbt-adapters but used directly, unpin minor to avoid version conflicts "dbt-common<1.0", "agate<2.0", ] @@ -61,10 +61,6 @@ include = ["dbt"] [tool.hatch.build.targets.wheel] packages = ["dbt"] -[tool.hatch.metadata] -# needed for installing `dbt-adapters` directly from github -allow-direct-references = true - [tool.hatch.version] path = "dbt/adapters/postgres/__about__.py" From 804e273aeca06885f7efd72249c2c7fc3c86b920 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Mon, 22 Jan 2024 19:18:57 -0500 Subject: [PATCH 008/114] migrate unit tests, they pass --- dbt/adapters/postgres/connections.py | 3 +- dbt/adapters/postgres/impl.py | 8 +- pyproject.toml | 79 +++- tests/unit/test_postgres_adapter.py | 660 +++++++++++++++++++++++++++ tests/unit/utils.py | 147 ++++++ 5 files changed, 878 insertions(+), 19 deletions(-) create mode 100644 tests/unit/test_postgres_adapter.py create mode 100644 tests/unit/utils.py diff --git a/dbt/adapters/postgres/connections.py b/dbt/adapters/postgres/connections.py index d573f53bf..244b50c3d 100644 --- a/dbt/adapters/postgres/connections.py +++ b/dbt/adapters/postgres/connections.py @@ -2,8 +2,7 @@ from dataclasses import dataclass from typing import Optional -from dbt.adapters.base import Credentials -from dbt.adapters.contracts.connection import AdapterResponse +from dbt.adapters.contracts.connection import AdapterResponse, Credentials from dbt.adapters.events.logging import AdapterLogger from dbt.adapters.sql import SQLConnectionManager from dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError diff --git a/dbt/adapters/postgres/impl.py b/dbt/adapters/postgres/impl.py index d4602f7c8..f5ace09a0 100644 --- a/dbt/adapters/postgres/impl.py +++ b/dbt/adapters/postgres/impl.py @@ -21,11 +21,9 @@ from dbt_common.exceptions import DbtRuntimeError from dbt_common.utils import encoding as dbt_encoding -from dbt.adapters.postgres import ( - PostgresColumn, - PostgresConnectionManager, - PostgresRelation, -) +from dbt.adapters.postgres.column import PostgresColumn +from dbt.adapters.postgres.connections import PostgresConnectionManager +from dbt.adapters.postgres.relation import PostgresRelation GET_RELATIONS_MACRO_NAME = "postgres__get_relations" diff --git a/pyproject.toml b/pyproject.toml index fc0e2585a..0067b0913 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", ] dependencies = [ - "dbt-adapters<1.0", + "dbt-adapters", "psycopg2-binary~=2.9", # installed via dbt-adapters but used directly, unpin minor to avoid version conflicts "dbt-common<1.0", @@ -34,15 +34,27 @@ lint = [ "black", "flake8", "Flake8-pyproject", +] +typecheck = [ "mypy", "types-protobuf", "types-pytz", ] test = [ + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core", + "freezegun", "pytest", "pytest-dotenv", "pytest-xdist", ] +integration = [ + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@migrate-dbt-tests-adapter#egg=dbt-tests-adapter&subdirectory=dbt-tests-adapter", +] +build = [ + "wheel", + "twine", + "check-wheel-contents", +] [project.urls] Homepage = "https://github.com/dbt-labs/dbt-postgres" @@ -55,6 +67,9 @@ Changelog = "https://github.com/dbt-labs/dbt-postgres/blob/main/CHANGELOG.md" requires = ["hatchling"] build-backend = "hatchling.build" +[tool.hatch.metadata] +allow-direct-references = true + [tool.hatch.build.targets.sdist] include = ["dbt"] @@ -65,18 +80,58 @@ packages = ["dbt"] path = "dbt/adapters/postgres/__about__.py" [tool.hatch.envs.default] -features = ["lint", "test"] -[tool.hatch.envs.default.scripts] -unit-tests = "- python -m pytest {args:tests/unit}" -lint-all = [ - "- lint-black", - "- lint-flake8", - "- lint-mypy", -] -lint-black = "python -m black ." -lint-flake8 = "python -m flake8 ." -lint-mypy = "python -m mypy ." +features = [ + "lint", + "typecheck", + "test", + "integration", + "build", +] + +[tool.hatch.envs.lint] +detached = true +features = ["lint"] +[tool.hatch.envs.lint.scripts] +all = [ + "- black-only", + "- flake8-only", +] +black-only = "python -m black ." +flake8-only = "python -m flake8 ." +[tool.hatch.envs.typecheck] +features = ["typecheck"] +[tool.hatch.envs.typecheck.scripts] +all = "python -m mypy ." + +[tool.hatch.envs.unit-tests] +features = ["test"] +[tool.hatch.envs.unit-tests.scripts] +all = "python -m pytest {args:tests/unit}" + +[tool.hatch.envs.integration-tests] +features = ["test", "integration"] +[tool.hatch.envs.integration-tests.scripts] +all = "python -m pytest {args:tests/functional}" + +[tool.hatch.envs.build] +detached = true +features = ["build"] +[tool.hatch.envs.build.scripts] +check-all = [ + "- check-wheel", + "- check-sdist", +] +check-wheel = [ + "twine check dist/*", + "find ./dist/dbt_adapters-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", + "pip freeze | grep dbt-adapters", +] +check-sdist = [ + "check-wheel-contents dist/*.whl --ignore W007,W008", + "find ./dist/dbt_adapters-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", + "pip freeze | grep dbt-adapters", +] [tool.black] line-length = 99 target-version = ['py38'] diff --git a/tests/unit/test_postgres_adapter.py b/tests/unit/test_postgres_adapter.py new file mode 100644 index 000000000..9278620f9 --- /dev/null +++ b/tests/unit/test_postgres_adapter.py @@ -0,0 +1,660 @@ +import dataclasses +import decimal +from multiprocessing import get_context +from unittest import TestCase, mock + +import agate +import pytest +from dbt.adapters.base import BaseRelation +from dbt.adapters.contracts.relation import Path +from dbt.context.manifest import generate_query_header_context +from dbt.context.providers import generate_runtime_macro_context +from dbt.contracts.files import FileHash +from dbt.contracts.graph.manifest import ManifestStateCheck +from dbt.task.debug import DebugTask +from dbt_common.clients import agate_helper +from dbt_common.exceptions import DbtConfigError, DbtValidationError +from psycopg2 import DatabaseError, extensions as psycopg2_extensions + +from dbt.adapters.postgres import Plugin as PostgresPlugin, PostgresAdapter + +from utils import ( + TestAdapterConversions, + clear_plugin, + config_from_parts_or_dicts, + inject_adapter, + load_internal_manifest_macros, + mock_connection, +) + + +# set_from_args(Namespace(WARN_ERROR=False), None) + + +class TestPostgresAdapter(TestCase): + def setUp(self): + project_cfg = { + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "config-version": 2, + } + profile_cfg = { + "outputs": { + "test": { + "type": "postgres", + "dbname": "postgres", + "user": "root", + "host": "thishostshouldnotexist", + "pass": "password", + "port": 5432, + "schema": "public", + } + }, + "target": "test", + } + + self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) + self.mp_context = get_context("spawn") + self._adapter = None + + @property + def adapter(self): + if self._adapter is None: + self._adapter = PostgresAdapter(self.config, self.mp_context) + inject_adapter(self._adapter, PostgresPlugin) + return self._adapter + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_acquire_connection_validations(self, psycopg2): + try: + connection = self.adapter.acquire_connection("dummy") + except DbtValidationError as e: + self.fail("got DbtValidationError: {}".format(str(e))) + except BaseException as e: + self.fail("acquiring connection failed with unknown exception: {}".format(str(e))) + self.assertEqual(connection.type, "postgres") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once() + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_acquire_connection(self, psycopg2): + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + self.assertEqual(connection.state, "open") + self.assertNotEqual(connection.handle, None) + psycopg2.connect.assert_called_once() + + def test_cancel_open_connections_empty(self): + self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) + + def test_cancel_open_connections_master(self): + key = self.adapter.connections.get_thread_identifier() + self.adapter.connections.thread_connections[key] = mock_connection("master") + self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) + + def test_cancel_open_connections_single(self): + master = mock_connection("master") + model = mock_connection("model") + key = self.adapter.connections.get_thread_identifier() + model.handle.get_backend_pid.return_value = 42 + self.adapter.connections.thread_connections.update( + { + key: master, + 1: model, + } + ) + with mock.patch.object(self.adapter.connections, "add_query") as add_query: + query_result = mock.MagicMock() + add_query.return_value = (None, query_result) + + self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1) + + add_query.assert_called_once_with("select pg_terminate_backend(42)") + + master.handle.get_backend_pid.assert_not_called() + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_default_connect_timeout(self, psycopg2): + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_changed_connect_timeout(self, psycopg2): + self.config.credentials = self.config.credentials.replace(connect_timeout=30) + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=30, + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_default_keepalive(self, psycopg2): + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_changed_keepalive(self, psycopg2): + self.config.credentials = self.config.credentials.replace(keepalives_idle=256) + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + keepalives_idle=256, + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_default_application_name(self, psycopg2): + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_changed_application_name(self, psycopg2): + self.config.credentials = self.config.credentials.replace(application_name="myapp") + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="myapp", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_role(self, psycopg2): + self.config.credentials = self.config.credentials.replace(role="somerole") + connection = self.adapter.acquire_connection("dummy") + + cursor = connection.handle.cursor() + + cursor.execute.assert_called_once_with("set role somerole") + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_search_path(self, psycopg2): + self.config.credentials = self.config.credentials.replace(search_path="test") + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="dbt", + options="-c search_path=test", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_sslmode(self, psycopg2): + self.config.credentials = self.config.credentials.replace(sslmode="require") + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + sslmode="require", + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_ssl_parameters(self, psycopg2): + self.config.credentials = self.config.credentials.replace(sslmode="verify-ca") + self.config.credentials = self.config.credentials.replace(sslcert="service.crt") + self.config.credentials = self.config.credentials.replace(sslkey="service.key") + self.config.credentials = self.config.credentials.replace(sslrootcert="ca.crt") + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + sslmode="verify-ca", + sslcert="service.crt", + sslkey="service.key", + sslrootcert="ca.crt", + application_name="dbt", + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_schema_with_space(self, psycopg2): + self.config.credentials = self.config.credentials.replace(search_path="test test") + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="dbt", + options="-c search_path=test\ test", # noqa: [W605] + ) + + @mock.patch("dbt.adapters.postgres.connections.psycopg2") + def test_set_zero_keepalive(self, psycopg2): + self.config.credentials = self.config.credentials.replace(keepalives_idle=0) + connection = self.adapter.acquire_connection("dummy") + + psycopg2.connect.assert_not_called() + connection.handle + psycopg2.connect.assert_called_once_with( + dbname="postgres", + user="root", + host="thishostshouldnotexist", + password="password", + port=5432, + connect_timeout=10, + application_name="dbt", + ) + + @mock.patch.object(PostgresAdapter, "execute_macro") + @mock.patch.object(PostgresAdapter, "_get_catalog_relations") + def test_get_catalog_various_schemas(self, mock_get_relations, mock_execute): + self.catalog_test(mock_get_relations, mock_execute, False) + + @mock.patch.object(PostgresAdapter, "execute_macro") + @mock.patch.object(PostgresAdapter, "_get_catalog_relations") + def test_get_filtered_catalog(self, mock_get_relations, mock_execute): + self.catalog_test(mock_get_relations, mock_execute, True) + + def catalog_test(self, mock_get_relations, mock_execute, filtered=False): + column_names = ["table_database", "table_schema", "table_name"] + relations = [ + BaseRelation(path=Path(database="dbt", schema="foo", identifier="bar")), + BaseRelation(path=Path(database="dbt", schema="FOO", identifier="baz")), + BaseRelation(path=Path(database="dbt", schema=None, identifier="bar")), + BaseRelation(path=Path(database="dbt", schema="quux", identifier="bar")), + BaseRelation(path=Path(database="dbt", schema="skip", identifier="bar")), + ] + rows = list(map(lambda x: dataclasses.astuple(x.path), relations)) + mock_execute.return_value = agate.Table(rows=rows, column_names=column_names) + + mock_get_relations.return_value = relations + + relation_configs = [] + used_schemas = {("dbt", "foo"), ("dbt", "quux")} + + if filtered: + catalog, exceptions = self.adapter.get_filtered_catalog( + relation_configs, used_schemas, set([relations[0], relations[3]]) + ) + else: + catalog, exceptions = self.adapter.get_catalog(relation_configs, used_schemas) + + tupled_catalog = set(map(tuple, catalog)) + if filtered: + self.assertEqual(tupled_catalog, {rows[0], rows[3]}) + else: + self.assertEqual(tupled_catalog, {rows[0], rows[1], rows[3]}) + + self.assertEqual(exceptions, []) + + +class TestConnectingPostgresAdapter(TestCase): + def setUp(self): + self.target_dict = { + "type": "postgres", + "dbname": "postgres", + "user": "root", + "host": "thishostshouldnotexist", + "pass": "password", + "port": 5432, + "schema": "public", + } + + profile_cfg = { + "outputs": { + "test": self.target_dict, + }, + "target": "test", + } + project_cfg = { + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "quoting": { + "identifier": False, + "schema": True, + }, + "config-version": 2, + } + + self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) + self.mp_context = get_context("spawn") + + self.handle = mock.MagicMock(spec=psycopg2_extensions.connection) + self.cursor = self.handle.cursor.return_value + self.mock_execute = self.cursor.execute + self.patcher = mock.patch("dbt.adapters.postgres.connections.psycopg2") + self.psycopg2 = self.patcher.start() + + # Create the Manifest.state_check patcher + @mock.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") + def _mock_state_check(self): + all_projects = self.all_projects + return ManifestStateCheck( + vars_hash=FileHash.from_contents("vars"), + project_hashes={name: FileHash.from_contents(name) for name in all_projects}, + profile_hash=FileHash.from_contents("profile"), + ) + + self.load_state_check = mock.patch( + "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" + ) + self.mock_state_check = self.load_state_check.start() + self.mock_state_check.side_effect = _mock_state_check + + self.psycopg2.connect.return_value = self.handle + self.adapter = PostgresAdapter(self.config, self.mp_context) + self.adapter.set_macro_resolver(load_internal_manifest_macros(self.config)) + self.adapter.set_macro_context_generator(generate_runtime_macro_context) + self.adapter.connections.set_query_header( + generate_query_header_context(self.config, self.adapter.get_macro_resolver()) + ) + self.qh_patch = mock.patch.object(self.adapter.connections.query_header, "add") + self.mock_query_header_add = self.qh_patch.start() + self.mock_query_header_add.side_effect = lambda q: "/* dbt */\n{}".format(q) + self.adapter.acquire_connection() + inject_adapter(self.adapter, PostgresPlugin) + + def tearDown(self): + # we want a unique self.handle every time. + self.adapter.cleanup_connections() + self.qh_patch.stop() + self.patcher.stop() + self.load_state_check.stop() + clear_plugin(PostgresPlugin) + + def test_quoting_on_drop_schema(self): + relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + quote_policy=self.adapter.config.quoting, + ) + self.adapter.drop_schema(relation) + + self.mock_execute.assert_has_calls( + [mock.call('/* dbt */\ndrop schema if exists "test_schema" cascade', None)] + ) + + def test_quoting_on_drop(self): + relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="test_table", + type="table", + quote_policy=self.adapter.config.quoting, + ) + self.adapter.drop_relation(relation) + self.mock_execute.assert_has_calls( + [ + mock.call( + '/* dbt */\ndrop table if exists "postgres"."test_schema".test_table cascade', + None, + ) + ] + ) + + def test_quoting_on_truncate(self): + relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="test_table", + type="table", + quote_policy=self.adapter.config.quoting, + ) + self.adapter.truncate_relation(relation) + self.mock_execute.assert_has_calls( + [mock.call('/* dbt */\ntruncate table "postgres"."test_schema".test_table', None)] + ) + + def test_quoting_on_rename(self): + from_relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="table_a", + type="table", + quote_policy=self.adapter.config.quoting, + ) + to_relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="table_b", + type="table", + quote_policy=self.adapter.config.quoting, + ) + + self.adapter.rename_relation(from_relation=from_relation, to_relation=to_relation) + self.mock_execute.assert_has_calls( + [ + mock.call( + '/* dbt */\nalter table "postgres"."test_schema".table_a rename to table_b', + None, + ) + ] + ) + + @pytest.mark.skip(""" + We moved from __version__ to __about__ when establishing `hatch` as our build tool. + However, `adapters.factory.register_adapter` assumes __version__ when determining + the adapter version. This test causes an import error + """) + def test_debug_connection_ok(self): + DebugTask.validate_connection(self.target_dict) + self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) + + def test_debug_connection_fail_nopass(self): + del self.target_dict["pass"] + with self.assertRaises(DbtConfigError): + DebugTask.validate_connection(self.target_dict) + + @pytest.mark.skip(""" + We moved from __version__ to __about__ when establishing `hatch` as our build tool. + However, `adapters.factory.register_adapter` assumes __version__ when determining + the adapter version. This test causes an import error + """) + def test_connection_fail_select(self): + self.mock_execute.side_effect = DatabaseError() + with self.assertRaises(DbtConfigError): + DebugTask.validate_connection(self.target_dict) + self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) + + def test_dbname_verification_is_case_insensitive(self): + # Override adapter settings from setUp() + self.target_dict["dbname"] = "Postgres" + profile_cfg = { + "outputs": { + "test": self.target_dict, + }, + "target": "test", + } + project_cfg = { + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "quoting": { + "identifier": False, + "schema": True, + }, + "config-version": 2, + } + self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) + self.mp_context = get_context("spawn") + self.adapter.cleanup_connections() + self._adapter = PostgresAdapter(self.config, self.mp_context) + self.adapter.verify_database("postgres") + + +class TestPostgresFilterCatalog(TestCase): + def test__catalog_filter_table(self): + used_schemas = [["a", "B"], ["a", "1234"]] + column_names = ["table_name", "table_database", "table_schema", "something"] + rows = [ + ["foo", "a", "b", "1234"], # include + ["foo", "a", "1234", "1234"], # include, w/ table schema as str + ["foo", "c", "B", "1234"], # skip + ["1234", "A", "B", "1234"], # include, w/ table name as str + ] + table = agate.Table(rows, column_names, agate_helper.DEFAULT_TYPE_TESTER) + + result = PostgresAdapter._catalog_filter_table(table, used_schemas) + assert len(result) == 3 + for row in result.rows: + assert isinstance(row["table_schema"], str) + assert isinstance(row["table_database"], str) + assert isinstance(row["table_name"], str) + assert isinstance(row["something"], decimal.Decimal) + + +class TestPostgresAdapterConversions(TestAdapterConversions): + def test_convert_text_type(self): + rows = [ + ["", "a1", "stringval1"], + ["", "a2", "stringvalasdfasdfasdfa"], + ["", "a3", "stringval3"], + ] + agate_table = self._make_table_of(rows, agate.Text) + expected = ["text", "text", "text"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_text_type(agate_table, col_idx) == expect + + def test_convert_number_type(self): + rows = [ + ["", "23.98", "-1"], + ["", "12.78", "-2"], + ["", "79.41", "-3"], + ] + agate_table = self._make_table_of(rows, agate.Number) + expected = ["integer", "float8", "integer"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_number_type(agate_table, col_idx) == expect + + def test_convert_boolean_type(self): + rows = [ + ["", "false", "true"], + ["", "false", "false"], + ["", "false", "true"], + ] + agate_table = self._make_table_of(rows, agate.Boolean) + expected = ["boolean", "boolean", "boolean"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_boolean_type(agate_table, col_idx) == expect + + def test_convert_datetime_type(self): + rows = [ + ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"], + ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"], + ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"], + ] + agate_table = self._make_table_of( + rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime] + ) + expected = [ + "timestamp without time zone", + "timestamp without time zone", + "timestamp without time zone", + ] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_datetime_type(agate_table, col_idx) == expect + + def test_convert_date_type(self): + rows = [ + ["", "2019-01-01", "2019-01-04"], + ["", "2019-01-02", "2019-01-04"], + ["", "2019-01-03", "2019-01-04"], + ] + agate_table = self._make_table_of(rows, agate.Date) + expected = ["date", "date", "date"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_date_type(agate_table, col_idx) == expect + + def test_convert_time_type(self): + # dbt's default type testers actually don't have a TimeDelta at all. + agate.TimeDelta + rows = [ + ["", "120s", "10s"], + ["", "3m", "11s"], + ["", "1h", "12s"], + ] + agate_table = self._make_table_of(rows, agate.TimeDelta) + expected = ["time", "time", "time"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_time_type(agate_table, col_idx) == expect diff --git a/tests/unit/utils.py b/tests/unit/utils.py new file mode 100644 index 000000000..1abb56cb0 --- /dev/null +++ b/tests/unit/utils.py @@ -0,0 +1,147 @@ +import os +import string +from unittest import TestCase, mock + +import agate +from dbt.config.project import PartialProject + + +class Obj: + which = "blah" + single_threaded = False + + +def mock_connection(name, state="open"): + conn = mock.MagicMock() + conn.name = name + conn.state = state + return conn + + +def profile_from_dict(profile, profile_name, cli_vars="{}"): + from dbt.config import Profile + from dbt.config.renderer import ProfileRenderer + from dbt.config.utils import parse_cli_vars + + if not isinstance(cli_vars, dict): + cli_vars = parse_cli_vars(cli_vars) + + renderer = ProfileRenderer(cli_vars) + + # in order to call dbt's internal profile rendering, we need to set the + # flags global. This is a bit of a hack, but it's the best way to do it. + from dbt.flags import set_from_args + from argparse import Namespace + + set_from_args(Namespace(), None) + return Profile.from_raw_profile_info( + profile, + profile_name, + renderer, + ) + + +def project_from_dict(project, profile, packages=None, selectors=None, cli_vars="{}"): + from dbt.config.renderer import DbtProjectYamlRenderer + from dbt.config.utils import parse_cli_vars + + if not isinstance(cli_vars, dict): + cli_vars = parse_cli_vars(cli_vars) + + renderer = DbtProjectYamlRenderer(profile, cli_vars) + + project_root = project.pop("project-root", os.getcwd()) + + partial = PartialProject.from_dicts( + project_root=project_root, + project_dict=project, + packages_dict=packages, + selectors_dict=selectors, + ) + return partial.render(renderer) + + +def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars={}): + from dbt.config import Project, Profile, RuntimeConfig + from copy import deepcopy + + if isinstance(project, Project): + profile_name = project.profile_name + else: + profile_name = project.get("profile") + + if not isinstance(profile, Profile): + profile = profile_from_dict( + deepcopy(profile), + profile_name, + cli_vars, + ) + + if not isinstance(project, Project): + project = project_from_dict( + deepcopy(project), + profile, + packages, + selectors, + cli_vars, + ) + + args = Obj() + args.vars = cli_vars + args.profile_dir = "/dev/null" + return RuntimeConfig.from_parts(project=project, profile=profile, args=args) + + +def inject_plugin(plugin): + from dbt.adapters.factory import FACTORY + + key = plugin.adapter.type() + FACTORY.plugins[key] = plugin + + +def inject_adapter(value, plugin): + """Inject the given adapter into the adapter factory, so your hand-crafted + artisanal adapter will be available from get_adapter() as if dbt loaded it. + """ + inject_plugin(plugin) + from dbt.adapters.factory import FACTORY + + key = value.type() + FACTORY.adapters[key] = value + + +def clear_plugin(plugin): + from dbt.adapters.factory import FACTORY + + key = plugin.adapter.type() + FACTORY.plugins.pop(key, None) + FACTORY.adapters.pop(key, None) + + +class TestAdapterConversions(TestCase): + def _get_tester_for(self, column_type): + from dbt_common.clients import agate_helper + + if column_type is agate.TimeDelta: # dbt never makes this! + return agate.TimeDelta() + + for instance in agate_helper.DEFAULT_TYPE_TESTER._possible_types: + if isinstance(instance, column_type): # include child types + return instance + + raise ValueError(f"no tester for {column_type}") + + def _make_table_of(self, rows, column_types): + column_names = list(string.ascii_letters[: len(rows[0])]) + if isinstance(column_types, type): + column_types = [self._get_tester_for(column_types) for _ in column_names] + else: + column_types = [self._get_tester_for(typ) for typ in column_types] + table = agate.Table(rows, column_names=column_names, column_types=column_types) + return table + + +def load_internal_manifest_macros(config, macro_hook=lambda m: None): + from dbt.parser.manifest import ManifestLoader + + return ManifestLoader.load_macros(config, macro_hook) From ae7b1b4c917a4695923245fe179f165eed0bbed9 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 23 Jan 2024 16:57:12 -0500 Subject: [PATCH 009/114] migrate all functional tests from dbt-core, remove references to dbt.flags, fix imports --- tests/functional/README.md | 14 + tests/functional/__init__.py | 3 + .../data/results/v4/run_results.json | 1 + .../data/results/v5/run_results.json | 1 + .../data/results/v6/run_results.json | 1 + .../artifacts/data/state/v1/manifest.json | 1 + .../artifacts/data/state/v10/manifest.json | 1 + .../artifacts/data/state/v11/manifest.json | 1 + .../artifacts/data/state/v12/manifest.json | 1 + .../artifacts/data/state/v2/manifest.json | 1 + .../artifacts/data/state/v3/manifest.json | 1 + .../artifacts/data/state/v4/manifest.json | 1 + .../artifacts/data/state/v5/manifest.json | 1 + .../artifacts/data/state/v6/manifest.json | 1 + .../artifacts/data/state/v7/manifest.json | 1 + .../artifacts/data/state/v8/manifest.json | 1 + .../artifacts/data/state/v9/manifest.json | 1 + .../functional/artifacts/expected_manifest.py | 1939 +++++++++++++++++ .../artifacts/expected_run_results.py | 243 +++ .../artifacts/test_artifact_fields.py | 50 + tests/functional/artifacts/test_artifacts.py | 710 ++++++ .../artifacts/test_docs_generate_defer.py | 42 + tests/functional/artifacts/test_override.py | 37 + .../artifacts/test_previous_version_state.py | 452 ++++ .../functional/artifacts/test_run_results.py | 74 + tests/functional/basic/data/seed-initial.csv | 101 + tests/functional/basic/data/seed-update.csv | 201 ++ .../basic/data/summary_expected.csv | 3 + .../basic/data/summary_expected_update.csv | 3 + .../functional/basic/data/varchar10_seed.sql | 110 + .../functional/basic/data/varchar300_seed.sql | 103 + tests/functional/basic/test_basic.py | 20 + .../basic/test_invalid_reference.py | 28 + tests/functional/basic/test_jaffle_shop.py | 25 + tests/functional/basic/test_mixed_case_db.py | 47 + tests/functional/basic/test_project.py | 120 + .../functional/basic/test_simple_reference.py | 271 +++ .../functional/basic/test_varchar_widening.py | 56 + tests/functional/build_command/fixtures.py | 302 +++ tests/functional/build_command/test_build.py | 211 ++ tests/functional/cli/test_cli_exit_codes.py | 37 + .../cli/test_env_var_deprecations.py | 60 + tests/functional/cli/test_error_handling.py | 19 + tests/functional/cli/test_multioption.py | 142 ++ tests/functional/cli/test_resolvers.py | 36 + tests/functional/compile/fixtures.py | 58 + tests/functional/compile/test_compile.py | 218 ++ tests/functional/configs/fixtures.py | 201 ++ tests/functional/configs/test_configs.py | 138 ++ .../configs/test_configs_in_schema_files.py | 257 +++ .../configs/test_contract_configs.py | 532 +++++ .../test_custom_node_colors_configs.py | 345 +++ .../configs/test_disabled_configs.py | 90 + .../functional/configs/test_disabled_model.py | 390 ++++ tests/functional/configs/test_dupe_paths.py | 74 + tests/functional/configs/test_get_default.py | 26 + .../functional/configs/test_grant_configs.py | 155 ++ tests/functional/configs/test_indiv_tests.py | 58 + .../functional/configs/test_unused_configs.py | 52 + tests/functional/conftest.py | 11 + .../context_methods/first_dependency.py | 95 + .../context_methods/test_builtin_functions.py | 163 ++ .../context_methods/test_cli_var_override.py | 67 + .../context_methods/test_cli_vars.py | 205 ++ .../context_methods/test_custom_env_vars.py | 35 + .../context_methods/test_env_vars.py | 193 ++ .../context_methods/test_secret_env_vars.py | 184 ++ .../context_methods/test_var_dependency.py | 82 + .../test_var_in_generate_name.py | 43 + .../context_methods/test_yaml_functions.py | 49 + .../contracts/test_contract_enforcement.py | 44 + .../contracts/test_contract_precision.py | 63 + .../contracts/test_nonstandard_data_type.py | 76 + tests/functional/custom_aliases/fixtures.py | 68 + .../custom_aliases/test_custom_aliases.py | 31 + .../data/seed_expected.sql | 113 + .../test_custom_singular_tests.py | 110 + tests/functional/dbt_runner.py | 43 + tests/functional/defer_state/fixtures.py | 424 ++++ .../defer_state/test_defer_state.py | 329 +++ .../defer_state/test_group_updates.py | 108 + .../defer_state/test_modified_state.py | 969 ++++++++ .../defer_state/test_run_results_state.py | 481 ++++ tests/functional/dependencies/data/seed.sql | 586 +++++ tests/functional/dependencies/data/update.sql | 7 + .../duplicate_dependency/dbt_project.yml | 5 + .../early_hook_dependency/dbt_project.yml | 6 + .../late_hook_dependency/dbt_project.yml | 6 + .../local_dependency/dbt_project.yml | 23 + .../local_dependency/macros/dep_macro.sql | 3 + .../macros/generate_schema_name.sql | 15 + .../models/model_to_import.sql | 1 + .../local_dependency/models/schema.yml | 11 + .../local_dependency/seeds/seed.csv | 2 + .../models_local/dep_source_model.sql | 2 + .../models_local/my_configured_model.sql | 4 + .../dependencies/models_local/my_model.sql | 2 + .../dependencies/models_local/schema.yml | 7 + .../models_local/source_override_model.sql | 2 + .../dependencies/test_dependency_options.py | 106 + .../dependencies/test_local_dependency.py | 352 +++ .../dependencies/test_simple_dependency.py | 435 ++++ .../test_simple_dependency_with_configs.py | 106 + tests/functional/deprecations/fixtures.py | 101 + .../deprecations/model_deprecations.py | 106 + .../deprecations/test_config_deprecations.py | 148 ++ .../deprecations/test_deprecations.py | 148 ++ .../docs/test_duplicate_docs_block.py | 34 + tests/functional/docs/test_generate.py | 100 + .../functional/docs/test_good_docs_blocks.py | 177 ++ tests/functional/docs/test_invalid_doc_ref.py | 46 + .../docs/test_missing_docs_blocks.py | 42 + .../docs/test_model_version_docs_blocks.py | 74 + tests/functional/docs/test_static.py | 50 + .../duplicates/test_duplicate_analysis.py | 32 + .../duplicates/test_duplicate_exposure.py | 30 + .../duplicates/test_duplicate_macro.py | 71 + .../duplicates/test_duplicate_metric.py | 40 + .../duplicates/test_duplicate_model.py | 263 +++ .../duplicates/test_duplicate_resource.py | 33 + .../duplicates/test_duplicate_source.py | 26 + tests/functional/exit_codes/fixtures.py | 65 + .../functional/exit_codes/test_exit_codes.py | 129 ++ tests/functional/exposures/fixtures.py | 161 ++ .../exposures/test_exposure_configs.py | 121 + tests/functional/exposures/test_exposures.py | 40 + .../graph_selection/test_graph_selection.py | 307 +++ .../graph_selection/test_group_selection.py | 116 + .../test_intersection_syntax.py | 238 ++ .../test_schema_test_graph_selection.py | 129 ++ .../graph_selection/test_tag_selection.py | 169 ++ .../graph_selection/test_version_selection.py | 131 ++ .../incremental_schema_tests/fixtures.py | 394 ++++ .../test_incremental_schema.py | 120 + .../test_invalid_models.py | 225 ++ .../invalid_model_tests/test_model_warning.py | 18 + tests/functional/list/fixtures.py | 213 ++ tests/functional/list/test_list.py | 797 +++++++ tests/functional/logging/test_logging.py | 98 + tests/functional/logging/test_meta_logging.py | 46 + tests/functional/macros/data/seed.sql | 23 + tests/functional/macros/fixtures.py | 168 ++ .../package_macro_overrides/dbt_project.yml | 7 + .../package_macro_overrides/macros/macros.sql | 3 + tests/functional/macros/test_macros.py | 272 +++ tests/functional/materializations/conftest.py | 370 ++++ tests/functional/materializations/fixtures.py | 245 +++ .../test_materialized_view.py | 117 + .../materialized_view_tests/utils.py | 73 + .../test_custom_materialization.py | 80 + .../materializations/test_incremental.py | 48 + .../test_runtime_materialization.py | 204 ++ .../test_supported_languages.py | 100 + tests/functional/metrics/fixtures.py | 666 ++++++ .../functional/metrics/test_metric_configs.py | 206 ++ .../metrics/test_metric_deferral.py | 83 + .../metrics/test_metric_helper_functions.py | 54 + tests/functional/metrics/test_metrics.py | 399 ++++ tests/functional/minimal_cli/fixtures.py | 111 + .../minimal_cli/test_minimal_cli.py | 62 + tests/functional/partial_parsing/fixtures.py | 1228 +++++++++++ .../partial_parsing/test_file_diff.py | 64 + .../partial_parsing/test_partial_parsing.py | 824 +++++++ .../test_pp_disabled_config.py | 224 ++ .../partial_parsing/test_pp_docs.py | 257 +++ .../partial_parsing/test_pp_groups.py | 156 ++ .../partial_parsing/test_pp_metrics.py | 85 + .../partial_parsing/test_pp_vars.py | 405 ++++ .../partial_parsing/test_versioned_models.py | 128 ++ tests/functional/postgres/fixtures.py | 134 ++ tests/functional/postgres/test_indexes.py | 148 ++ tests/functional/profiles/test_profile_dir.py | 172 ++ .../functional/profiles/test_profiles_yml.py | 65 + tests/functional/projects/__init__.py | 3 + .../projects/dbt_integration/__init__.py | 21 + .../dbt_integration/macros/do_something.sql | 7 + .../dbt_integration/models/incremental.sql | 10 + .../projects/dbt_integration/models/table.sql | 2 + .../projects/dbt_integration/models/view.sql | 2 + .../dbt_integration/schemas/project.yml | 16 + .../dbt_integration/schemas/schema.yml | 8 + .../projects/graph_selection/__init__.py | 44 + .../projects/graph_selection/data/seed.csv | 101 + .../graph_selection/data/summary_expected.csv | 3 + .../models/alternative_users.sql | 7 + .../graph_selection/models/base_users.sql | 6 + .../graph_selection/models/emails.sql | 6 + .../graph_selection/models/emails_alt.sql | 1 + .../graph_selection/models/nested_users.sql | 1 + .../graph_selection/models/never_selected.sql | 2 + .../graph_selection/models/subdir.sql | 1 + .../projects/graph_selection/models/users.sql | 6 + .../graph_selection/models/users_rollup.sql | 14 + .../models/users_rollup_dependency.sql | 2 + .../schemas/patch_path_selection.yml | 5 + .../graph_selection/schemas/properties.yml | 8 + .../graph_selection/schemas/schema.yml | 78 + .../projects/jaffle_shop/__init__.py | 58 + .../jaffle_shop/data/raw_customers.csv | 101 + .../projects/jaffle_shop/data/raw_orders.csv | 100 + .../jaffle_shop/data/raw_payments.csv | 114 + .../projects/jaffle_shop/docs/docs.md | 14 + .../projects/jaffle_shop/docs/overview.md | 11 + .../projects/jaffle_shop/models/customers.sql | 69 + .../projects/jaffle_shop/models/orders.sql | 56 + .../jaffle_shop/schemas/jaffle_shop.yml | 82 + .../projects/jaffle_shop/schemas/staging.yml | 31 + .../jaffle_shop/staging/stg_customers.sql | 22 + .../jaffle_shop/staging/stg_orders.sql | 23 + .../jaffle_shop/staging/stg_payments.sql | 25 + tests/functional/projects/utils.py | 20 + tests/functional/retry/fixtures.py | 60 + tests/functional/retry/test_retry.py | 330 +++ tests/functional/run_operations/fixtures.py | 72 + .../run_operations/test_run_operations.py | 144 ++ tests/functional/saved_queries/fixtures.py | 93 + .../functional/saved_queries/test_configs.py | 186 ++ .../saved_queries/test_saved_query_build.py | 41 + .../saved_queries/test_saved_query_parsing.py | 113 + tests/functional/schema/fixtures/macros.py | 37 + tests/functional/schema/fixtures/sql.py | 128 ++ tests/functional/schema/test_custom_schema.py | 220 ++ tests/functional/schema_tests/data/seed.sql | 117 + .../schema_tests/data/seed_failure.sql | 116 + tests/functional/schema_tests/fixtures.py | 1275 +++++++++++ .../schema_tests/test_schema_v2_tests.py | 1130 ++++++++++ .../functional/selected_resources/fixtures.py | 35 + .../test_selected_resources.py | 105 + tests/functional/semantic_models/fixtures.py | 322 +++ .../test_semantic_model_configs.py | 227 ++ .../test_semantic_model_parsing.py | 148 ++ .../semantic_models/test_semantic_models.py | 98 + tests/functional/show/fixtures.py | 109 + tests/functional/show/test_show.py | 194 ++ .../data/invalidate_postgres.sql | 27 + .../simple_snapshot/data/seed_pg.sql | 223 ++ .../simple_snapshot/data/shared_macros.sql | 80 + .../simple_snapshot/data/update.sql | 261 +++ tests/functional/simple_snapshot/fixtures.py | 389 ++++ .../simple_snapshot/test_basic_snapshot.py | 373 ++++ .../test_changing_check_cols_snapshot.py | 127 ++ .../test_changing_strategy_snapshot.py | 128 ++ .../test_check_cols_snapshot.py | 114 + .../test_check_cols_updated_at_snapshot.py | 114 + .../test_comment_ending_snapshot.py | 36 + .../test_cross_schema_snapshot.py | 48 + .../test_hard_delete_snapshot.py | 192 ++ .../test_invalid_namespace_snapshot.py | 67 + .../test_long_text_snapshot.py | 70 + .../test_missing_strategy_snapshot.py | 51 + .../test_renamed_source_snapshot.py | 74 + .../test_select_exclude_snapshot.py | 161 ++ .../test_slow_query_snapshot.py | 82 + tests/functional/source_overrides/fixtures.py | 387 ++++ .../test_simple_source_override.py | 146 ++ .../test_source_overrides_duplicate_model.py | 68 + .../functional/sources/common_source_setup.py | 67 + tests/functional/sources/data/seed.sql | 113 + tests/functional/sources/fixtures.py | 474 ++++ .../functional/sources/test_simple_source.py | 196 ++ .../functional/sources/test_source_configs.py | 181 ++ .../sources/test_source_fresher_state.py | 705 ++++++ .../sources/test_source_freshness.py | 403 ++++ tests/functional/statements/fixtures.py | 182 ++ .../functional/statements/test_statements.py | 64 + tests/functional/test_access.py | 479 ++++ tests/functional/test_analyses.py | 72 + tests/functional/test_catalog.py | 5 + tests/functional/test_clean.py | 55 + tests/functional/test_colors.py | 44 + tests/functional/test_column_quotes.py | 100 + tests/functional/test_config.py | 402 ++++ tests/functional/test_connection_manager.py | 77 + tests/functional/test_custom_target_path.py | 34 + tests/functional/test_cycles.py | 68 + tests/functional/test_dbt_runner.py | 73 + tests/functional/test_default_selectors.py | 99 + tests/functional/test_events.py | 33 + tests/functional/test_experimental_parser.py | 303 +++ tests/functional/test_external_reference.py | 58 + tests/functional/test_fail_fast.py | 64 + tests/functional/test_init.py | 845 +++++++ tests/functional/test_ref_override.py | 145 ++ tests/functional/test_relation_name.py | 126 ++ tests/functional/test_selection/conftest.py | 96 + .../test_selection_expansion.py | 567 +++++ tests/functional/test_severity.py | 122 ++ tests/functional/test_store_test_failures.py | 46 + tests/functional/test_thread_count.py | 25 + tests/functional/test_timezones.py | 64 + tests/functional/test_types.py | 34 + tests/functional/test_unlogged_table.py | 73 + tests/functional/unit_testing/fixtures.py | 600 +++++ .../unit_testing/test_csv_fixtures.py | 251 +++ tests/functional/unit_testing/test_state.py | 130 ++ .../unit_testing/test_unit_testing.py | 236 ++ .../unit_testing/test_ut_dependency.py | 115 + .../unit_testing/test_ut_sources.py | 104 + tests/functional/utils.py | 14 + 299 files changed, 43496 insertions(+) create mode 100644 tests/functional/README.md create mode 100644 tests/functional/__init__.py create mode 100644 tests/functional/artifacts/data/results/v4/run_results.json create mode 100644 tests/functional/artifacts/data/results/v5/run_results.json create mode 100644 tests/functional/artifacts/data/results/v6/run_results.json create mode 100644 tests/functional/artifacts/data/state/v1/manifest.json create mode 100644 tests/functional/artifacts/data/state/v10/manifest.json create mode 100644 tests/functional/artifacts/data/state/v11/manifest.json create mode 100644 tests/functional/artifacts/data/state/v12/manifest.json create mode 100644 tests/functional/artifacts/data/state/v2/manifest.json create mode 100644 tests/functional/artifacts/data/state/v3/manifest.json create mode 100644 tests/functional/artifacts/data/state/v4/manifest.json create mode 100644 tests/functional/artifacts/data/state/v5/manifest.json create mode 100644 tests/functional/artifacts/data/state/v6/manifest.json create mode 100644 tests/functional/artifacts/data/state/v7/manifest.json create mode 100644 tests/functional/artifacts/data/state/v8/manifest.json create mode 100644 tests/functional/artifacts/data/state/v9/manifest.json create mode 100644 tests/functional/artifacts/expected_manifest.py create mode 100644 tests/functional/artifacts/expected_run_results.py create mode 100644 tests/functional/artifacts/test_artifact_fields.py create mode 100644 tests/functional/artifacts/test_artifacts.py create mode 100644 tests/functional/artifacts/test_docs_generate_defer.py create mode 100644 tests/functional/artifacts/test_override.py create mode 100644 tests/functional/artifacts/test_previous_version_state.py create mode 100644 tests/functional/artifacts/test_run_results.py create mode 100644 tests/functional/basic/data/seed-initial.csv create mode 100644 tests/functional/basic/data/seed-update.csv create mode 100644 tests/functional/basic/data/summary_expected.csv create mode 100644 tests/functional/basic/data/summary_expected_update.csv create mode 100644 tests/functional/basic/data/varchar10_seed.sql create mode 100644 tests/functional/basic/data/varchar300_seed.sql create mode 100644 tests/functional/basic/test_basic.py create mode 100644 tests/functional/basic/test_invalid_reference.py create mode 100644 tests/functional/basic/test_jaffle_shop.py create mode 100644 tests/functional/basic/test_mixed_case_db.py create mode 100644 tests/functional/basic/test_project.py create mode 100644 tests/functional/basic/test_simple_reference.py create mode 100644 tests/functional/basic/test_varchar_widening.py create mode 100644 tests/functional/build_command/fixtures.py create mode 100644 tests/functional/build_command/test_build.py create mode 100644 tests/functional/cli/test_cli_exit_codes.py create mode 100644 tests/functional/cli/test_env_var_deprecations.py create mode 100644 tests/functional/cli/test_error_handling.py create mode 100644 tests/functional/cli/test_multioption.py create mode 100644 tests/functional/cli/test_resolvers.py create mode 100644 tests/functional/compile/fixtures.py create mode 100644 tests/functional/compile/test_compile.py create mode 100644 tests/functional/configs/fixtures.py create mode 100644 tests/functional/configs/test_configs.py create mode 100644 tests/functional/configs/test_configs_in_schema_files.py create mode 100644 tests/functional/configs/test_contract_configs.py create mode 100644 tests/functional/configs/test_custom_node_colors_configs.py create mode 100644 tests/functional/configs/test_disabled_configs.py create mode 100644 tests/functional/configs/test_disabled_model.py create mode 100644 tests/functional/configs/test_dupe_paths.py create mode 100644 tests/functional/configs/test_get_default.py create mode 100644 tests/functional/configs/test_grant_configs.py create mode 100644 tests/functional/configs/test_indiv_tests.py create mode 100644 tests/functional/configs/test_unused_configs.py create mode 100644 tests/functional/conftest.py create mode 100644 tests/functional/context_methods/first_dependency.py create mode 100644 tests/functional/context_methods/test_builtin_functions.py create mode 100644 tests/functional/context_methods/test_cli_var_override.py create mode 100644 tests/functional/context_methods/test_cli_vars.py create mode 100644 tests/functional/context_methods/test_custom_env_vars.py create mode 100644 tests/functional/context_methods/test_env_vars.py create mode 100644 tests/functional/context_methods/test_secret_env_vars.py create mode 100644 tests/functional/context_methods/test_var_dependency.py create mode 100644 tests/functional/context_methods/test_var_in_generate_name.py create mode 100644 tests/functional/context_methods/test_yaml_functions.py create mode 100644 tests/functional/contracts/test_contract_enforcement.py create mode 100644 tests/functional/contracts/test_contract_precision.py create mode 100644 tests/functional/contracts/test_nonstandard_data_type.py create mode 100644 tests/functional/custom_aliases/fixtures.py create mode 100644 tests/functional/custom_aliases/test_custom_aliases.py create mode 100644 tests/functional/custom_singular_tests/data/seed_expected.sql create mode 100644 tests/functional/custom_singular_tests/test_custom_singular_tests.py create mode 100644 tests/functional/dbt_runner.py create mode 100644 tests/functional/defer_state/fixtures.py create mode 100644 tests/functional/defer_state/test_defer_state.py create mode 100644 tests/functional/defer_state/test_group_updates.py create mode 100644 tests/functional/defer_state/test_modified_state.py create mode 100644 tests/functional/defer_state/test_run_results_state.py create mode 100644 tests/functional/dependencies/data/seed.sql create mode 100644 tests/functional/dependencies/data/update.sql create mode 100644 tests/functional/dependencies/duplicate_dependency/dbt_project.yml create mode 100644 tests/functional/dependencies/early_hook_dependency/dbt_project.yml create mode 100644 tests/functional/dependencies/late_hook_dependency/dbt_project.yml create mode 100644 tests/functional/dependencies/local_dependency/dbt_project.yml create mode 100644 tests/functional/dependencies/local_dependency/macros/dep_macro.sql create mode 100644 tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql create mode 100644 tests/functional/dependencies/local_dependency/models/model_to_import.sql create mode 100644 tests/functional/dependencies/local_dependency/models/schema.yml create mode 100644 tests/functional/dependencies/local_dependency/seeds/seed.csv create mode 100644 tests/functional/dependencies/models_local/dep_source_model.sql create mode 100644 tests/functional/dependencies/models_local/my_configured_model.sql create mode 100644 tests/functional/dependencies/models_local/my_model.sql create mode 100644 tests/functional/dependencies/models_local/schema.yml create mode 100644 tests/functional/dependencies/models_local/source_override_model.sql create mode 100644 tests/functional/dependencies/test_dependency_options.py create mode 100644 tests/functional/dependencies/test_local_dependency.py create mode 100644 tests/functional/dependencies/test_simple_dependency.py create mode 100644 tests/functional/dependencies/test_simple_dependency_with_configs.py create mode 100644 tests/functional/deprecations/fixtures.py create mode 100644 tests/functional/deprecations/model_deprecations.py create mode 100644 tests/functional/deprecations/test_config_deprecations.py create mode 100644 tests/functional/deprecations/test_deprecations.py create mode 100644 tests/functional/docs/test_duplicate_docs_block.py create mode 100644 tests/functional/docs/test_generate.py create mode 100644 tests/functional/docs/test_good_docs_blocks.py create mode 100644 tests/functional/docs/test_invalid_doc_ref.py create mode 100644 tests/functional/docs/test_missing_docs_blocks.py create mode 100644 tests/functional/docs/test_model_version_docs_blocks.py create mode 100644 tests/functional/docs/test_static.py create mode 100644 tests/functional/duplicates/test_duplicate_analysis.py create mode 100644 tests/functional/duplicates/test_duplicate_exposure.py create mode 100644 tests/functional/duplicates/test_duplicate_macro.py create mode 100644 tests/functional/duplicates/test_duplicate_metric.py create mode 100644 tests/functional/duplicates/test_duplicate_model.py create mode 100644 tests/functional/duplicates/test_duplicate_resource.py create mode 100644 tests/functional/duplicates/test_duplicate_source.py create mode 100644 tests/functional/exit_codes/fixtures.py create mode 100644 tests/functional/exit_codes/test_exit_codes.py create mode 100644 tests/functional/exposures/fixtures.py create mode 100644 tests/functional/exposures/test_exposure_configs.py create mode 100644 tests/functional/exposures/test_exposures.py create mode 100644 tests/functional/graph_selection/test_graph_selection.py create mode 100644 tests/functional/graph_selection/test_group_selection.py create mode 100644 tests/functional/graph_selection/test_intersection_syntax.py create mode 100644 tests/functional/graph_selection/test_schema_test_graph_selection.py create mode 100644 tests/functional/graph_selection/test_tag_selection.py create mode 100644 tests/functional/graph_selection/test_version_selection.py create mode 100644 tests/functional/incremental_schema_tests/fixtures.py create mode 100644 tests/functional/incremental_schema_tests/test_incremental_schema.py create mode 100644 tests/functional/invalid_model_tests/test_invalid_models.py create mode 100644 tests/functional/invalid_model_tests/test_model_warning.py create mode 100644 tests/functional/list/fixtures.py create mode 100644 tests/functional/list/test_list.py create mode 100644 tests/functional/logging/test_logging.py create mode 100644 tests/functional/logging/test_meta_logging.py create mode 100644 tests/functional/macros/data/seed.sql create mode 100644 tests/functional/macros/fixtures.py create mode 100644 tests/functional/macros/package_macro_overrides/dbt_project.yml create mode 100644 tests/functional/macros/package_macro_overrides/macros/macros.sql create mode 100644 tests/functional/macros/test_macros.py create mode 100644 tests/functional/materializations/conftest.py create mode 100644 tests/functional/materializations/fixtures.py create mode 100644 tests/functional/materializations/materialized_view_tests/test_materialized_view.py create mode 100644 tests/functional/materializations/materialized_view_tests/utils.py create mode 100644 tests/functional/materializations/test_custom_materialization.py create mode 100644 tests/functional/materializations/test_incremental.py create mode 100644 tests/functional/materializations/test_runtime_materialization.py create mode 100644 tests/functional/materializations/test_supported_languages.py create mode 100644 tests/functional/metrics/fixtures.py create mode 100644 tests/functional/metrics/test_metric_configs.py create mode 100644 tests/functional/metrics/test_metric_deferral.py create mode 100644 tests/functional/metrics/test_metric_helper_functions.py create mode 100644 tests/functional/metrics/test_metrics.py create mode 100644 tests/functional/minimal_cli/fixtures.py create mode 100644 tests/functional/minimal_cli/test_minimal_cli.py create mode 100644 tests/functional/partial_parsing/fixtures.py create mode 100644 tests/functional/partial_parsing/test_file_diff.py create mode 100644 tests/functional/partial_parsing/test_partial_parsing.py create mode 100644 tests/functional/partial_parsing/test_pp_disabled_config.py create mode 100644 tests/functional/partial_parsing/test_pp_docs.py create mode 100644 tests/functional/partial_parsing/test_pp_groups.py create mode 100644 tests/functional/partial_parsing/test_pp_metrics.py create mode 100644 tests/functional/partial_parsing/test_pp_vars.py create mode 100644 tests/functional/partial_parsing/test_versioned_models.py create mode 100644 tests/functional/postgres/fixtures.py create mode 100644 tests/functional/postgres/test_indexes.py create mode 100644 tests/functional/profiles/test_profile_dir.py create mode 100644 tests/functional/profiles/test_profiles_yml.py create mode 100644 tests/functional/projects/__init__.py create mode 100644 tests/functional/projects/dbt_integration/__init__.py create mode 100644 tests/functional/projects/dbt_integration/macros/do_something.sql create mode 100644 tests/functional/projects/dbt_integration/models/incremental.sql create mode 100644 tests/functional/projects/dbt_integration/models/table.sql create mode 100644 tests/functional/projects/dbt_integration/models/view.sql create mode 100644 tests/functional/projects/dbt_integration/schemas/project.yml create mode 100644 tests/functional/projects/dbt_integration/schemas/schema.yml create mode 100644 tests/functional/projects/graph_selection/__init__.py create mode 100644 tests/functional/projects/graph_selection/data/seed.csv create mode 100644 tests/functional/projects/graph_selection/data/summary_expected.csv create mode 100644 tests/functional/projects/graph_selection/models/alternative_users.sql create mode 100644 tests/functional/projects/graph_selection/models/base_users.sql create mode 100644 tests/functional/projects/graph_selection/models/emails.sql create mode 100644 tests/functional/projects/graph_selection/models/emails_alt.sql create mode 100644 tests/functional/projects/graph_selection/models/nested_users.sql create mode 100644 tests/functional/projects/graph_selection/models/never_selected.sql create mode 100644 tests/functional/projects/graph_selection/models/subdir.sql create mode 100644 tests/functional/projects/graph_selection/models/users.sql create mode 100644 tests/functional/projects/graph_selection/models/users_rollup.sql create mode 100644 tests/functional/projects/graph_selection/models/users_rollup_dependency.sql create mode 100644 tests/functional/projects/graph_selection/schemas/patch_path_selection.yml create mode 100644 tests/functional/projects/graph_selection/schemas/properties.yml create mode 100644 tests/functional/projects/graph_selection/schemas/schema.yml create mode 100644 tests/functional/projects/jaffle_shop/__init__.py create mode 100644 tests/functional/projects/jaffle_shop/data/raw_customers.csv create mode 100644 tests/functional/projects/jaffle_shop/data/raw_orders.csv create mode 100644 tests/functional/projects/jaffle_shop/data/raw_payments.csv create mode 100644 tests/functional/projects/jaffle_shop/docs/docs.md create mode 100644 tests/functional/projects/jaffle_shop/docs/overview.md create mode 100644 tests/functional/projects/jaffle_shop/models/customers.sql create mode 100644 tests/functional/projects/jaffle_shop/models/orders.sql create mode 100644 tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml create mode 100644 tests/functional/projects/jaffle_shop/schemas/staging.yml create mode 100644 tests/functional/projects/jaffle_shop/staging/stg_customers.sql create mode 100644 tests/functional/projects/jaffle_shop/staging/stg_orders.sql create mode 100644 tests/functional/projects/jaffle_shop/staging/stg_payments.sql create mode 100644 tests/functional/projects/utils.py create mode 100644 tests/functional/retry/fixtures.py create mode 100644 tests/functional/retry/test_retry.py create mode 100644 tests/functional/run_operations/fixtures.py create mode 100644 tests/functional/run_operations/test_run_operations.py create mode 100644 tests/functional/saved_queries/fixtures.py create mode 100644 tests/functional/saved_queries/test_configs.py create mode 100644 tests/functional/saved_queries/test_saved_query_build.py create mode 100644 tests/functional/saved_queries/test_saved_query_parsing.py create mode 100644 tests/functional/schema/fixtures/macros.py create mode 100644 tests/functional/schema/fixtures/sql.py create mode 100644 tests/functional/schema/test_custom_schema.py create mode 100644 tests/functional/schema_tests/data/seed.sql create mode 100644 tests/functional/schema_tests/data/seed_failure.sql create mode 100644 tests/functional/schema_tests/fixtures.py create mode 100644 tests/functional/schema_tests/test_schema_v2_tests.py create mode 100644 tests/functional/selected_resources/fixtures.py create mode 100644 tests/functional/selected_resources/test_selected_resources.py create mode 100644 tests/functional/semantic_models/fixtures.py create mode 100644 tests/functional/semantic_models/test_semantic_model_configs.py create mode 100644 tests/functional/semantic_models/test_semantic_model_parsing.py create mode 100644 tests/functional/semantic_models/test_semantic_models.py create mode 100644 tests/functional/show/fixtures.py create mode 100644 tests/functional/show/test_show.py create mode 100644 tests/functional/simple_snapshot/data/invalidate_postgres.sql create mode 100644 tests/functional/simple_snapshot/data/seed_pg.sql create mode 100644 tests/functional/simple_snapshot/data/shared_macros.sql create mode 100644 tests/functional/simple_snapshot/data/update.sql create mode 100644 tests/functional/simple_snapshot/fixtures.py create mode 100644 tests/functional/simple_snapshot/test_basic_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_changing_strategy_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_check_cols_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_comment_ending_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_cross_schema_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_hard_delete_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_long_text_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_missing_strategy_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_renamed_source_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_select_exclude_snapshot.py create mode 100644 tests/functional/simple_snapshot/test_slow_query_snapshot.py create mode 100644 tests/functional/source_overrides/fixtures.py create mode 100644 tests/functional/source_overrides/test_simple_source_override.py create mode 100644 tests/functional/source_overrides/test_source_overrides_duplicate_model.py create mode 100644 tests/functional/sources/common_source_setup.py create mode 100644 tests/functional/sources/data/seed.sql create mode 100644 tests/functional/sources/fixtures.py create mode 100644 tests/functional/sources/test_simple_source.py create mode 100644 tests/functional/sources/test_source_configs.py create mode 100644 tests/functional/sources/test_source_fresher_state.py create mode 100644 tests/functional/sources/test_source_freshness.py create mode 100644 tests/functional/statements/fixtures.py create mode 100644 tests/functional/statements/test_statements.py create mode 100644 tests/functional/test_access.py create mode 100644 tests/functional/test_analyses.py create mode 100644 tests/functional/test_catalog.py create mode 100644 tests/functional/test_clean.py create mode 100644 tests/functional/test_colors.py create mode 100644 tests/functional/test_column_quotes.py create mode 100644 tests/functional/test_config.py create mode 100644 tests/functional/test_connection_manager.py create mode 100644 tests/functional/test_custom_target_path.py create mode 100644 tests/functional/test_cycles.py create mode 100644 tests/functional/test_dbt_runner.py create mode 100644 tests/functional/test_default_selectors.py create mode 100644 tests/functional/test_events.py create mode 100644 tests/functional/test_experimental_parser.py create mode 100644 tests/functional/test_external_reference.py create mode 100644 tests/functional/test_fail_fast.py create mode 100644 tests/functional/test_init.py create mode 100644 tests/functional/test_ref_override.py create mode 100644 tests/functional/test_relation_name.py create mode 100644 tests/functional/test_selection/conftest.py create mode 100644 tests/functional/test_selection/test_selection_expansion.py create mode 100644 tests/functional/test_severity.py create mode 100644 tests/functional/test_store_test_failures.py create mode 100644 tests/functional/test_thread_count.py create mode 100644 tests/functional/test_timezones.py create mode 100644 tests/functional/test_types.py create mode 100644 tests/functional/test_unlogged_table.py create mode 100644 tests/functional/unit_testing/fixtures.py create mode 100644 tests/functional/unit_testing/test_csv_fixtures.py create mode 100644 tests/functional/unit_testing/test_state.py create mode 100644 tests/functional/unit_testing/test_unit_testing.py create mode 100644 tests/functional/unit_testing/test_ut_dependency.py create mode 100644 tests/functional/unit_testing/test_ut_sources.py create mode 100644 tests/functional/utils.py diff --git a/tests/functional/README.md b/tests/functional/README.md new file mode 100644 index 000000000..b5aa542ea --- /dev/null +++ b/tests/functional/README.md @@ -0,0 +1,14 @@ +# This is where we are putting the pytest conversions of test/integration + +# Goals of moving tests to pytest + * Readability + * Modularity + * Easier to create and debug + * Ability to create a project for external debugging + +# TODO + * Create the ability to export a project + * Explore using: + * https://github.com/pytest-docker-compose/pytest-docker-compose or + * https://github.com/avast/pytest-docker for automatically managing a postgres instance running in a docker container + * Track test coverage (https://pytest-cov.readthedocs.io/en/latest) diff --git a/tests/functional/__init__.py b/tests/functional/__init__.py new file mode 100644 index 000000000..f2b8d6b4c --- /dev/null +++ b/tests/functional/__init__.py @@ -0,0 +1,3 @@ +# Functional tests focus on the business requirements of an application. They +# only verify the output of an action and do not check the intermediate states +# of the system when performing that action. diff --git a/tests/functional/artifacts/data/results/v4/run_results.json b/tests/functional/artifacts/data/results/v4/run_results.json new file mode 100644 index 000000000..0767eb8e8 --- /dev/null +++ b/tests/functional/artifacts/data/results/v4/run_results.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v4.json", "dbt_version": "1.6.7", "generated_at": "2023-11-06T20:40:37.557735Z", "invocation_id": "42f85a60-4f7b-4cc1-a197-62687104fecc", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:40:37.486980Z", "completed_at": "2023-11-06T20:40:37.488837Z"}, {"name": "execute", "started_at": "2023-11-06T20:40:37.490290Z", "completed_at": "2023-11-06T20:40:37.539787Z"}], "thread_id": "Thread-9 (worker)", "execution_time": 0.0566411018371582, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:40:37.485334Z", "completed_at": "2023-11-06T20:40:37.489266Z"}, {"name": "execute", "started_at": "2023-11-06T20:40:37.494545Z", "completed_at": "2023-11-06T20:40:37.542811Z"}], "thread_id": "Thread-8 (worker)", "execution_time": 0.060118675231933594, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine"}], "elapsed_time": 0.18144583702087402, "args": {"defer": false, "indirect_selection": "eager", "select": [], "log_level_file": "debug", "use_colors": true, "cache_selected_only": false, "strict_mode": false, "use_colors_file": true, "partial_parse_file_diff": true, "static_parser": true, "write_json": true, "warn_error_options": {"include": [], "exclude": []}, "print": true, "log_level": "info", "profiles_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-16/profile0", "log_path": "/Users/jerco/dev/product/dbt-core/logs/test16993032361853467608", "partial_parse": true, "quiet": false, "log_format_file": "debug", "version_check": true, "send_anonymous_usage_stats": false, "project_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-16/project0", "log_format": "default", "enable_legacy_logger": false, "exclude": [], "populate_cache": true, "log_file_max_bytes": 10485760, "macro_debugging": false, "printer_width": 80, "invocation_command": "dbt tests/functional/artifacts/test_previous_version_state.py::TestPreviousVersionState", "which": "run", "favor_state": false, "introspect": true, "vars": {}}} diff --git a/tests/functional/artifacts/data/results/v5/run_results.json b/tests/functional/artifacts/data/results/v5/run_results.json new file mode 100644 index 000000000..63a7a58ea --- /dev/null +++ b/tests/functional/artifacts/data/results/v5/run_results.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v5.json", "dbt_version": "1.8.0a1", "generated_at": "2023-11-06T20:43:08.231028Z", "invocation_id": "a9238a29-6764-47f0-ba7d-f7d61ae5e6c0", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:43:08.146847Z", "completed_at": "2023-11-06T20:43:08.149862Z"}, {"name": "execute", "started_at": "2023-11-06T20:43:08.151676Z", "completed_at": "2023-11-06T20:43:08.206208Z"}], "thread_id": "Thread-9 (worker)", "execution_time": 0.06433510780334473, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model", "compiled": true, "compiled_code": "select 1 as id", "relation_name": "\"dbt\".\"test16993033859513627134_test_previous_version_state\".\"my_model\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:43:08.144982Z", "completed_at": "2023-11-06T20:43:08.150320Z"}, {"name": "execute", "started_at": "2023-11-06T20:43:08.155222Z", "completed_at": "2023-11-06T20:43:08.209881Z"}], "thread_id": "Thread-8 (worker)", "execution_time": 0.06822013854980469, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine", "compiled": true, "compiled_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "relation_name": "\"dbt\".\"test16993033859513627134_test_previous_version_state\".\"metricflow_time_spine\""}], "elapsed_time": 0.18284392356872559, "args": {"send_anonymous_usage_stats": false, "profiles_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-19/profile0", "static_parser": true, "partial_parse_file_diff": true, "printer_width": 80, "log_level_file": "debug", "project_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-19/project0", "log_format": "default", "strict_mode": false, "macro_debugging": false, "indirect_selection": "eager", "version_check": true, "use_colors_file": true, "select": [], "log_file_max_bytes": 10485760, "warn_error_options": {"include": [], "exclude": []}, "log_format_file": "debug", "invocation_command": "dbt tests/functional/artifacts/test_previous_version_state.py::TestPreviousVersionState", "write_json": true, "log_level": "info", "cache_selected_only": false, "quiet": false, "favor_state": false, "enable_legacy_logger": false, "log_path": "/Users/jerco/dev/product/dbt-core/logs/test16993033859513627134", "which": "run", "partial_parse": true, "introspect": true, "show_resource_report": false, "exclude": [], "populate_cache": true, "vars": {}, "use_colors": true, "defer": false, "print": true}} diff --git a/tests/functional/artifacts/data/results/v6/run_results.json b/tests/functional/artifacts/data/results/v6/run_results.json new file mode 100644 index 000000000..f78176c93 --- /dev/null +++ b/tests/functional/artifacts/data/results/v6/run_results.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v6.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-06T18:53:19.641690Z", "invocation_id": "ad4ef714-e6c6-425e-b7c8-c1c4369df4ea", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-12-06T18:53:19.554953Z", "completed_at": "2023-12-06T18:53:19.559711Z"}, {"name": "execute", "started_at": "2023-12-06T18:53:19.564874Z", "completed_at": "2023-12-06T18:53:19.620151Z"}], "thread_id": "Thread-8", "execution_time": 0.06995701789855957, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine", "compiled": true, "compiled_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "relation_name": "\"dbt\".\"test17018887966812726006_test_previous_version_state\".\"metricflow_time_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-12-06T18:53:19.557019Z", "completed_at": "2023-12-06T18:53:19.559247Z"}, {"name": "execute", "started_at": "2023-12-06T18:53:19.561000Z", "completed_at": "2023-12-06T18:53:19.622080Z"}], "thread_id": "Thread-9", "execution_time": 0.07100677490234375, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model", "compiled": true, "compiled_code": "select 1 as id", "relation_name": "\"dbt\".\"test17018887966812726006_test_previous_version_state\".\"my_model\""}], "elapsed_time": 0.13903093338012695, "args": {"print": true, "log_level_file": "debug", "quiet": false, "warn_error_options": {"include": [], "exclude": []}, "write_json": true, "invocation_command": "dbt --cov=core --cov-append --cov-report=xml tests/functional/artifacts/test_previous_version_state.py", "log_level": "info", "select": [], "project_dir": "/private/var/folders/67/r0f0jlj54h95zl3fhmb217jh0000gp/T/pytest-of-william/pytest-68/project0", "static_parser": true, "log_file_max_bytes": 10485760, "empty": false, "introspect": true, "log_format_file": "debug", "vars": {}, "strict_mode": false, "indirect_selection": "eager", "show_resource_report": false, "favor_state": false, "version_check": true, "cache_selected_only": false, "enable_legacy_logger": false, "partial_parse": true, "profiles_dir": "/private/var/folders/67/r0f0jlj54h95zl3fhmb217jh0000gp/T/pytest-of-william/pytest-68/profile0", "defer": false, "printer_width": 80, "send_anonymous_usage_stats": false, "use_colors": true, "log_path": "/Users/william/git/dbt-core/logs/test17018887966812726006", "partial_parse_file_diff": true, "populate_cache": true, "macro_debugging": false, "use_colors_file": true, "log_format": "default", "which": "run", "exclude": []}} diff --git a/tests/functional/artifacts/data/state/v1/manifest.json b/tests/functional/artifacts/data/state/v1/manifest.json new file mode 100644 index 000000000..2811b7456 --- /dev/null +++ b/tests/functional/artifacts/data/state/v1/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v1.json", "dbt_version": "0.19.2", "generated_at": "2022-06-08T05:12:57.550908Z", "invocation_id": "57566e21-fbd4-4848-87ca-d05ddbd9012e", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "materialized": "view", "persist_docs": {}, "vars": {}, "quoting": {}, "column_types": {}, "alias": null, "schema": null, "database": null, "tags": [], "full_refresh": null, "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}}}, "sources": {}, "macros": {"macro.test.drop_relation": {"unique_id": "macro.test.drop_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(dbt_labs_materialized_views.drop_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.postgres__list_relations_without_caching": {"unique_id": "macro.test.postgres__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.postgres__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.postgres_get_relations": {"unique_id": "macro.test.postgres_get_relations", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(dbt_labs_materialized_views.postgres_get_relations()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres_get_relations"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.redshift__list_relations_without_caching": {"unique_id": "macro.test.redshift__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "redshift__list_relations_without_caching", "macro_sql": "{% macro redshift__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.redshift__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.redshift__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.load_relation": {"unique_id": "macro.test.load_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(dbt_labs_materialized_views.redshift_load_relation_or_mv(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence = 'p' -- [p]ermanent table. Other values are [u]nlogged table, [t]emporary table\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% for column_name in column_dict %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.column_list": {"unique_id": "macro.dbt.column_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list", "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.column_list_for_create_table": {"unique_id": "macro.dbt.column_list_for_create_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list_for_create_table", "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n ;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/fishtown-analytics/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = snapshot_string_as_time(now) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.basic_load_csv_rows": {"unique_id": "macro.dbt.basic_load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "basic_load_csv_rows", "macro_sql": "{% macro basic_load_csv_rows(model, batch_size, agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_seed_column_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n {{ return(basic_load_csv_rows(model, 10000, agate_table) )}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.basic_load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.incremental_upsert": {"unique_id": "macro.dbt.incremental_upsert", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/helpers.sql", "original_file_path": "macros/materializations/incremental/helpers.sql", "name": "incremental_upsert", "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/incremental.sql", "original_file_path": "macros/materializations/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(this) %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif existing_relation.is_view or should_full_refresh() %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set backup_identifier = existing_relation.identifier ~ \"__dbt_backup\" %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n {% do adapter.drop_relation(backup_relation) %}\n\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.should_full_refresh", "macro.dbt.run_query", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column in dest_columns -%}\n {{ adapter.quote(column.name) }} = DBT_INTERNAL_SOURCE.{{ adapter.quote(column.name) }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.common_get_delete_insert_merge_sql": {"unique_id": "macro.dbt.common_get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "common_get_delete_insert_merge_sql", "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n );\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.common_get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/table/table.sql", "original_file_path": "macros/materializations/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/view.sql", "original_file_path": "macros/materializations/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch(\"handle_existing_table\", packages=['dbt'])(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view(run_outside_transaction_hooks=True) %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {% if run_outside_transaction_hooks %}\n -- no transactions on BigQuery\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n {% endif %}\n\n -- `BEGIN` happens here on Snowflake\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if run_outside_transaction_hooks %}\n -- No transactions on BigQuery\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n {% endif %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/query.sql", "original_file_path": "macros/etc/query.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/is_incremental.sql", "original_file_path": "macros/etc/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, to, field) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('from')) %}\n\n\nselect count(*) as validation_errors\nfrom (\n select {{ column_name }} as id from {{ model }}\n) as child\nleft join (\n select {{ field }} as id from {{ to }}\n) as parent on parent.id = child.id\nwhere child.id is not null\n and parent.id is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "test_relationships", "macro_sql": "{% macro test_relationships(model, to, field) %}\n {% set macro = adapter.dispatch('test_relationships') %}\n {{ macro(model, to, field, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n\nselect count(*) as validation_errors\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "test_not_null", "macro_sql": "{% macro test_not_null(model) %}\n {% set macro = adapter.dispatch('test_not_null') %}\n {{ macro(model, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n\nselect count(*) as validation_errors\nfrom (\n\n select\n {{ column_name }}\n\n from {{ model }}\n where {{ column_name }} is not null\n group by {{ column_name }}\n having count(*) > 1\n\n) validation_errors\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "test_unique", "macro_sql": "{% macro test_unique(model) %}\n {% set macro = adapter.dispatch('test_unique') %}\n {{ macro(model, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, values) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('field')) %}\n{% set quote_values = kwargs.get('quote', True) %}\n\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field not in (\n {% for value in values -%}\n {% if quote_values -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n)\n\nselect count(*) as validation_errors\nfrom validation_errors\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "test_accepted_values", "macro_sql": "{% macro test_accepted_values(model, values) %}\n {% set macro = adapter.dispatch('test_accepted_values') %}\n {{ macro(model, values, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support."}}, "exposures": {}, "selectors": {}, "disabled": [], "parent_map": {"model.test.my_model": []}, "child_map": {"model.test.my_model": []}} diff --git a/tests/functional/artifacts/data/state/v10/manifest.json b/tests/functional/artifacts/data/state/v10/manifest.json new file mode 100644 index 000000000..9c73cf337 --- /dev/null +++ b/tests/functional/artifacts/data/state/v10/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", "dbt_version": "1.6.6", "generated_at": "2023-10-11T20:49:37.080431Z", "invocation_id": "e2f630c5-769a-47a2-89ce-294a00e14e1a", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.543413, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.456355, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16970573770617803847_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state"}, "created_at": 1697057377.471309, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.492032, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1697057377.508335, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.525708, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/79/5290gpvn3lx5jdryk4844rm80000gn/T/pytest-of-quigleymalcolm/pytest-271/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.552852, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.553834, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1697057377.594166}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.099874, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1000938, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1002848, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1005828, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.10079, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1009028, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.101016, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.101125, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1022131, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1028638, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1119502, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.112461, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.112787, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.113112, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.113596, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.114043, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.114221, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1145759, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1149912, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1158679, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1160781, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.116409, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.116695, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117132, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117368, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117985, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118195, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118315, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118505, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118647, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.120726, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121023, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql": {"name": "postgres__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- get_create_materialized_view_as_sql(intermediate_relation, sql) -}}\n\n {% if existing_relation is not none %}\n alter materialized view {{ existing_relation }} rename to {{ backup_relation.include(database=False, schema=False) }};\n {% endif %}\n\n alter materialized view {{ intermediate_relation }} rename to {{ relation.include(database=False, schema=False) }};\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121473, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121773, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1218822, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.122449, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.122707, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.123094, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.123816, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.124038, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.124678, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.127991, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.128149, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1286578, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.129076, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130199, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130403, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130552, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1307, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1308448, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.131226, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.131537, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1318662, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13231, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.132591, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.136333, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13651, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1367402, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1374788, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13765, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.137829, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1392791, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.140613, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.144871, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.14516, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145334, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1454248, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1455739, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145694, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145904, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.146819, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1470149, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.147279, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.147718, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.154073, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156199, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156668, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156987, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.157378, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.157763, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.159425, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1599932, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.160686, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1609302, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1617038, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.168492, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.170215, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.170489, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.171521, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.171801, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.172469, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.173121, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.17403, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174277, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174471, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174779, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174974, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.17528, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175472, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175742, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175937, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.176092, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.176376, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1815941, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1874628, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.188724, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.189962, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1908412, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.196028, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1964319, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.196692, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_materialized_view_as_sql(target_relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1980631, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.198303, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.198978, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"<change_category>\": [{\"action\": \"<name>\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.19941, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.19956, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.200053, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.200245, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2005591, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2006972, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_as_sql": {"name": "get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.get_replace_materialized_view_as_sql", "macro_sql": "{% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- log('Applying REPLACE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2011251, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_as_sql": {"name": "default__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_as_sql", "macro_sql": "{% macro default__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2013052, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.201658, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.201805, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202045, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202231, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202538, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202677, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2059531, "supported_languages": ["sql"]}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2070122, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2071402, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.207693, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2078662, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2080052, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.209325, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.209718, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.210073, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2131069, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.214011, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2142022, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.214653, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2153769, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2158551, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2160509, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.216245, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219257, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219564, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219792, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2213418, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.221772, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2219388, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.222131, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.222568, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.225961, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.231348, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.232269, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.232512, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233001, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2331991, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233336, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233479, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233599, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233762, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233883, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2343712, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2345622, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.236012, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2364511, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.236854, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237386, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237652, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237949, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238351, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238611, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238965, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2392662, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.239514, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2400918, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.241607, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.242203, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2425091, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2444658, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.245748, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.246516, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2467651, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2469969, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247075, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2474089, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2475772, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247827, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247957, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248211, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248318, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2486641, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248833, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2490602, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2491379, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249404, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2495492, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249847, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249984, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2506409, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251061, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251404, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251571, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.25186, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252075, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252336, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252498, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2527459, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252909, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.25316, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.253269, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2535648, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2537038, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.253951, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.254057, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.254996, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2551548, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2553222, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255476, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255645, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2558029, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255967, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256154, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256317, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256474, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256723, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2568839, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257049, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2572002, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2574809, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257617, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2578712, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257977, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258348, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258624, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258776, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259321, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259488, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259715, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259997, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.260129, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.26051, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.260766, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2610502, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261183, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261564, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261753, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261918, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262178, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2626739, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262827, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262975, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263083, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263253, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.26333, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2635038, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263676, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264548, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264692, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264854, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265263, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265459, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265602, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265763, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265894, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.268777, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.268948, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269174, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269548, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269803, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270133, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270325, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2704918, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270706, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270998, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.271255, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.271829, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.27207, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.27222, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.272419, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.272854, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.273275, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2735639, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.273793, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.275553, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2756748, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2758532, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.275969, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276321, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276514, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2766201, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276926, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277123, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277358, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2775512, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277785, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.278485, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.278682, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2789361, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2791739, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2803478, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2808928, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281089, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281228, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281921, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2820952, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282303, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282475, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282752, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.283254, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2848241, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285086, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2852778, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285444, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285694, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.28595, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2861598, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2864761, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.286675, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2868428, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2877948, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {%- if relation.is_table -%}\n {{- drop_table(relation) -}}\n {%- elif relation.is_view -%}\n {{- drop_view(relation) -}}\n {%- elif relation.is_materialized_view -%}\n {{- drop_materialized_view(relation) -}}\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endif -%}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.drop_table", "macro.dbt.drop_view", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.28826, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288448, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288556, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288737, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288845, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.289025, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.289133, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.291446, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2916129, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2919302, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2921631, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.292375, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.292562, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.293418, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2937758, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2939641, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.294328, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2945652, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.295179, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.295451, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.296254, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.29796, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2981188, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.298965, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.299389, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.299982, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.30047, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3005419, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3010602, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3013, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.301594, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.301873, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1697057377.578206}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1697057377.583621, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1697057377.5840042, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1697057377.585288, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}, "alias": null, "offset_window": null, "offset_to_grain": null}]}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1697057377.5861351, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.4547698, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16970573770617803847_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state", "enabled": false}, "created_at": 1697057377.4774349, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.489575, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.5060952, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.558094, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1697057377.5790222}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1697057377.584552, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.5646772, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/79/5290gpvn3lx5jdryk4844rm80000gn/T/pytest-of-quigleymalcolm/pytest-271/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1697057377.5942788}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test16970573770617803847_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1697057377.5929039, "config": {"enabled": true}, "primary_entity": null}}} diff --git a/tests/functional/artifacts/data/state/v11/manifest.json b/tests/functional/artifacts/data/state/v11/manifest.json new file mode 100644 index 000000000..2d57234cf --- /dev/null +++ b/tests/functional/artifacts/data/state/v11/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v11.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-05T16:53:23.890718Z", "invocation_id": "6ab55e79-96b3-4825-ad9f-e1d1da5a1ba3", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.698763, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795201.711199, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17017951992510102999_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17017951992510102999_test_previous_version_state"}, "created_at": 1701795201.845236, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.01357, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1701795202.213242, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.463294, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/jk/wc60sy6551568b9mkw_01h9r0000gn/T/pytest-of-emily/pytest-179/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.822232, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.8247292, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1701795203.1697412}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6199858, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.620511, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6209228, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.621485, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.621939, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.622278, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6225321, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6227798, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.625891, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6267462, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.627949, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.628255, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6437912, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.64499, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.645702, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6464581, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.647518, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.648478, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.64887, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6498601, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.651049, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.653446, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.653924, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.654655, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.655276, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.656255, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6567712, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.658122, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.658636, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.658905, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.659279, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.659605, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.660476, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.661935, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.662252, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.66293, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6632478, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.663639, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.665281, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.666545, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.667615, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6688828, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.669285, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.67079, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.671197, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.671505, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.672849, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.67325, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.673802, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6751292, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.682186, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.682699, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.684223, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.685657, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6880362, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6885052, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.688853, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.689187, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6895208, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.69028, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.69104, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6918972, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.692802, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.693482, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.700966, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7015522, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.702199, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.70386, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.704243, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.704649, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.707978, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.711052, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.717665, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7189279, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.719371, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.719593, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7199302, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7201998, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.720671, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7225971, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7230332, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.723613, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.724629, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.738543, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7443469, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7464151, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.747135, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.74761, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as actual_or_expected\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as actual_or_expected\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.748797, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.749613, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.750813, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n {%- endfor -%}\n\n {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.get_expected_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.754615, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7630222, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.763915, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.764492, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.768254, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.769085, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.770625, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.776339, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.782128, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.785681, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.786989, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.788492, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.789021, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.790645, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8009338, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.805187, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.805814, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.808223, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8088698, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.810357, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.811747, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.81349, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8140302, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.814534, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.815238, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8156958, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.816755, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.817258, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8179939, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.818708, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.819102, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8197608, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.830284, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.839777, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.842585, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8455942, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.847949, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8489118, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8492649, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8502321, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8505979, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.858089, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.864974, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.873394, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.875537, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.876073, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.877249, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.877687, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.877996, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.878316, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.878582, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8789499, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8792262, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.880359, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8807812, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.884084, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8857841, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.887074, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8884811, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8898969, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8908532, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.892451, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8935628, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8955379, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8970492, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.897825, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8986602, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8993979, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9021928, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.906515, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9075258, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.908553, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9093351, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9099782, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9110131, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9116, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.913279, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.914389, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.914904, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.915583, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.916468, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.917393, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.918984, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.920189, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9210382, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9216352, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9222558, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.922518, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.923136, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.923624, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9247959, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.92559, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.926479, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.926847, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.928088, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9285948, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"<change_category>\": [{\"action\": \"<name>\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9293408, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.930051, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.931433, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.932239, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.934562, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9352589, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.93693, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.937391, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.937782, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.940798, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.942014, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.942831, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.943433, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.943681, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9442549, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9447072, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.945371, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.945725, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.948194, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.94883, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9502022, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.952827, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.954375, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.95489, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.955314, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.955926, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.956181, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.957737, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.958209, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.960974, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9614248, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.962048, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9626722, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.963051, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.96443, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.965119, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.966122, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.967995, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9693792, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9701362, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9706838, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.971901, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.974993, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.976295, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.976956, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.98135, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.985949, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.987921, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.988501, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.989037, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9892218, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.990579, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9921122, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.99267, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9934888, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.994245, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.994626, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.995157, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.995448, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.997557, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.999308, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.000303, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.002402, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.003065, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.003328, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0040388, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.004419, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0049028, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.005106, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.00577, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.006273, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0068932, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.007206, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.008418, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0093498, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.010061, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.010436, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.011046, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.011369, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0119638, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.012336, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.012935, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.013579, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0144289, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.014882, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0164359, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.017073, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.018199, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.018629, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.020689, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.021079, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0216, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.021996, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.022377, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.022727, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0231, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0235069, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0238762, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.02422, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.02458, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0250869, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.025496, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.025868, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0265422, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.026857, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0273962, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.027641, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.028342, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0289578, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0293171, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.030658, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.031221, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.032276, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.033396, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.033823, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0352662, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0362182, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.037148, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.037907, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0390232, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.039777, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.040294, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.040722, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.041665, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.042021, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0423522, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.042599, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0429802, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.043158, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.043534, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0439239, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.045585, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0459142, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.046274, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.047541, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.048099, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0486348, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0491462, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.049642, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.053334, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.053748, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0542579, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.054944, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0555499, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.056275, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.056695, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.057075, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.057626, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.059056, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.059603, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.059943, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.060834, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.061783, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.062391, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.062944, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.066784, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.06715, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.067581, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0679018, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.069325, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.069806, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0700648, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.070583, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.071019, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0715342, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.071965, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0724852, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0740328, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0744698, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0752409, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.075805, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.078434, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.079551, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.07999, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0803041, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.081865, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.082284, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.082762, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0831811, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.08413, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.085705, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.090453, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.091082, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.09168, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.092282, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.092702, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.093062, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.093466, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.094017, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.094474, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.095205, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.095628, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0959911, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0963771, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.096725, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0972311, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.097627, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.102133, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.102572, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.103314, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.103957, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.104427, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.104844, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.107417, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1082602, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.10869, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.10948, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.10999, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1114, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.111994, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.11379, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this) -%}\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.120623, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * FROM dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.122299, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n\n{#-- wrap yaml strings in quotes, apply cast --#}\n{%- for column_name, column_value in row.items() -%}\n{% set row_update = {column_name: column_value} %}\n{%- if column_value is string -%}\n{%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%}\n{%- elif column_value is none -%}\n{%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%}\n{%- else -%}\n{%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%}\n{%- endif -%}\n{%- do row.update(row_update) -%}\n{%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.124015, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1272519, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.127626, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.129449, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1304069, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.131742, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.132842, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.133022, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1345239, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.135215, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.136147, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.136801, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1701795203.004519}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1701795203.088031, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1701795203.08911, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1701795203.0918, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}]}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1701795203.093819, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795201.707621, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17017951992510102999_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17017951992510102999_test_previous_version_state", "enabled": false}, "created_at": 1701795201.899797, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17017951992510102999_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.007819, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.207177, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.8354402, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1701795203.007275}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1701795203.090345, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.8690538, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/jk/wc60sy6551568b9mkw_01h9r0000gn/T/pytest-of-emily/pytest-179/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1701795203.1703029}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17017951992510102999_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1701795203.164725, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} diff --git a/tests/functional/artifacts/data/state/v12/manifest.json b/tests/functional/artifacts/data/state/v12/manifest.json new file mode 100644 index 000000000..d0ec33487 --- /dev/null +++ b/tests/functional/artifacts/data/state/v12/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-13T17:51:37.252335Z", "invocation_id": "ea31128b-c8be-4ccf-806a-112748d83b11", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.4497569, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.062557, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17024898921033785545_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state"}, "created_at": 1702489893.131624, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.215913, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1702489893.278812, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.396907, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/1j/l_jt_2w16t5dnplmd2n0cr880000gq/T/pytest-of-gerda/pytest-106/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.496192, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.4976692, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1702489893.6685581}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.459133, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.459455, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.45973, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460128, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4604428, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460591, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460742, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4608908, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.462921, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.463424, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4643211, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.464503, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4763231, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.477042, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4774752, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.477914, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.478566, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.479178, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.479415, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4798899, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4811032, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4822998, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.482568, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.483017, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.483407, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.484009, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.484319, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.48519, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4854872, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.485644, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.485892, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.486088, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.486612, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.487571, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.487763, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4881668, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.488352, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.488592, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.489763, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4905322, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.490937, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4914439, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.491634, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4926, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4928472, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4930282, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4938009, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4940412, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4943411, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4951968, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4995358, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.499748, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.500465, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.501039, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5025449, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.502815, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503018, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503212, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503405, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503903, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.504405, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.504831, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.505419, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.505802, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.510586, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.510832, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5111418, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5121439, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.512374, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.512612, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5145621, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.516487, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.521842, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522252, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5224829, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522608, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522808, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522967, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.523251, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.524491, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.524776, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.525128, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5257301, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.534119, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.537909, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5393128, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.539742, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5400288, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as actual_or_expected\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as actual_or_expected\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5407722, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.541282, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.541804, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n {%- endfor -%}\n\n {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.get_expected_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.543893, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.550401, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5509398, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.551308, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.55323, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.553552, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.554471, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.558391, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.56234, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.564464, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.565242, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.566174, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5665019, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5675159, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5758579, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.578167, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5785348, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.579946, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.58032, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.581223, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.58209, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.583271, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5836082, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5838752, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.584299, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.584571, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5850089, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5852711, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.585648, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.585918, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5861251, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5865128, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5934541, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6008031, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.602528, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6041899, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6053782, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.605706, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.605867, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.606281, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.606463, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.611434, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.615864, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6225102, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.623789, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6241221, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.624792, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625057, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625251, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6254442, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625603, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6258318, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6259909, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.626668, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.62693, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.628712, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.629285, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.629807, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63053, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6308942, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.631285, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.631825, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6321821, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.633137, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6336472, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.633909, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63418, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.634453, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6355321, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6373062, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6378188, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.638164, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63853, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.638835, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.639267, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.639553, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6404989, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.641108, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6413922, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.641782, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6422558, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.642634, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.643284, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.643911, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6443572, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.64466, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645021, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645167, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6455371, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645806, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646227, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646407, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646778, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6469781, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.64781, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6480699, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"<change_category>\": [{\"action\": \"<name>\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.648458, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6486568, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6490319, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.649228, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.650592, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.650763, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.65154, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.651774, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.651969, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6539361, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.654461, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.654939, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.655298, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6554399, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.655802, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656011, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656379, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656577, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6577091, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.65796, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.658557, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.659515, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.660146, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6604002, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6606479, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.661003, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6611462, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.662261, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.662462, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.664104, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6643698, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.664682, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.66505, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.665251, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.665806, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666029, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666276, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666858, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6673222, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.667722, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6680498, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6688168, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.670816, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6716192, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6720269, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6744618, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.676194, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677233, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6775522, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677862, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677966, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.678944, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6797628, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68008, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.680588, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.681036, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68126, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68159, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.681758, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6828642, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.683444, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.683711, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.684439, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.684801, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6849449, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.685508, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.685786, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686151, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686363, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6867359, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686934, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6873422, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.687535, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.688407, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.688978, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6894479, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6896799, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690075, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690288, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690662, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690897, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691238, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691461, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691802, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691951, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6923468, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6925418, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.692876, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.693096, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694456, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694669, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694897, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6951098, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.695392, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.695667, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6958919, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696158, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696384, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696604, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696844, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.697058, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6972768, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6974878, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6978838, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.698067, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.698406, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6985521, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699027, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699476, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699679, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700398, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700626, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700948, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.701324, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70152, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.702045, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7023842, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7027788, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.702966, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.703469, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.703722, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7039502, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.704203, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.704849, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70506, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7052631, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70541, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7056398, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.705746, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.706052, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.706282, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707399, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707598, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707818, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7083972, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7086651, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.708859, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.709079, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70926, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.711892, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712121, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712429, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712831, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713176, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713624, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713892, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.714118, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.714466, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715341, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715683, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715879, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.716444, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.717024, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7174232, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.717751, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.720056, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7202182, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7204502, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.720602, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.721077, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.721333, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7214751, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7217898, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7220511, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7223868, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.722655, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.722969, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7240279, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7242901, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.724632, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7249548, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.726592, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727339, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727602, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727795, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7286851, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7289228, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.729202, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.729436, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7298071, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.730485, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734237, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734608, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734891, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.735255, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7355149, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.735737, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7359931, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7364202, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7367098, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.737128, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7373838, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.737608, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7378361, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.73805, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.738337, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.738574, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.741552, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.741784, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.742213, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.742526, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.74281, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7430599, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.744768, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745255, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745512, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745993, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7463129, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7471611, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.747524, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.748637, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this) -%}\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7529018, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * FROM dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.753653, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n\n{#-- wrap yaml strings in quotes, apply cast --#}\n{%- for column_name, column_value in row.items() -%}\n{% set row_update = {column_name: column_value} %}\n{%- if column_value is string -%}\n{%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%}\n{%- elif column_value is none -%}\n{%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%}\n{%- else -%}\n{%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%}\n{%- endif -%}\n{%- do row.update(row_update) -%}\n{%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7547069, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.756869, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7570791, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.758251, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7588322, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.75966, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7603211, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7604249, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.761129, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.761452, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7618601, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7623239, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1702489893.579107}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1702489893.629962, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1702489893.63043, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1702489893.632082, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1702489893.633114, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.059992, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17024898921033785545_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state", "enabled": false}, "created_at": 1702489893.159178, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.2125812, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.275594, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.503576, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1702489893.5801542}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1702489893.631113, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.520305, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/1j/l_jt_2w16t5dnplmd2n0cr880000gq/T/pytest-of-gerda/pytest-106/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1702489893.668709}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17024898921033785545_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1702489893.6660612, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} diff --git a/tests/functional/artifacts/data/state/v2/manifest.json b/tests/functional/artifacts/data/state/v2/manifest.json new file mode 100644 index 000000000..25532471a --- /dev/null +++ b/tests/functional/artifacts/data/state/v2/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v2.json", "dbt_version": "0.20.2", "generated_at": "2022-06-08T05:12:43.870174Z", "invocation_id": "b9b21a26-1804-47f9-866b-620501fe5540", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "materialized": "view", "persist_docs": {}, "vars": {}, "quoting": {}, "column_types": {}, "alias": null, "schema": null, "database": null, "tags": [], "full_refresh": null, "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1654665164}}, "sources": {}, "macros": {"macro.test.drop_relation": {"unique_id": "macro.test.drop_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(dbt_labs_materialized_views.drop_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.postgres__list_relations_without_caching": {"unique_id": "macro.test.postgres__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.postgres__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.postgres_get_relations": {"unique_id": "macro.test.postgres_get_relations", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(dbt_labs_materialized_views.postgres_get_relations()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres_get_relations"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.redshift__list_relations_without_caching": {"unique_id": "macro.test.redshift__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "redshift__list_relations_without_caching", "macro_sql": "{% macro redshift__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.redshift__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.redshift__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.load_relation": {"unique_id": "macro.test.load_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(dbt_labs_materialized_views.redshift_load_relation_or_mv(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence = 'p' -- [p]ermanent table. Other values are [u]nlogged table, [t]emporary table\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% for column_name in column_dict %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "materialization_test_default", "macro_sql": "\n\n{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.column_list": {"unique_id": "macro.dbt.column_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list", "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.column_list_for_create_table": {"unique_id": "macro.dbt.column_list_for_create_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list_for_create_table", "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n ;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/fishtown-analytics/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.basic_load_csv_rows": {"unique_id": "macro.dbt.basic_load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "basic_load_csv_rows", "macro_sql": "{% macro basic_load_csv_rows(model, batch_size, agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_seed_column_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n {{ return(basic_load_csv_rows(model, 10000, agate_table) )}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.basic_load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.incremental_upsert": {"unique_id": "macro.dbt.incremental_upsert", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/helpers.sql", "original_file_path": "macros/materializations/incremental/helpers.sql", "name": "incremental_upsert", "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/incremental.sql", "original_file_path": "macros/materializations/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif existing_relation.is_view or should_full_refresh() %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% do adapter.drop_relation(intermediate_relation) %}\n {% do adapter.drop_relation(backup_relation) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.load_relation", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.should_full_refresh", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.common_get_delete_insert_merge_sql": {"unique_id": "macro.dbt.common_get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "common_get_delete_insert_merge_sql", "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n );\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.common_get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/table/table.sql", "original_file_path": "macros/materializations/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/view.sql", "original_file_path": "macros/materializations/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', macro_namespace = 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view(run_outside_transaction_hooks=True) %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {% if run_outside_transaction_hooks %}\n -- no transactions on BigQuery\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n {% endif %}\n\n -- `BEGIN` happens here on Snowflake\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if run_outside_transaction_hooks %}\n -- No transactions on BigQuery\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n {% endif %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/query.sql", "original_file_path": "macros/etc/query.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/is_incremental.sql", "original_file_path": "macros/etc/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by 1\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support."}}, "exposures": {}, "selectors": {}, "disabled": [], "parent_map": {"model.test.my_model": []}, "child_map": {"model.test.my_model": []}} diff --git a/tests/functional/artifacts/data/state/v3/manifest.json b/tests/functional/artifacts/data/state/v3/manifest.json new file mode 100644 index 000000000..a9eb02068 --- /dev/null +++ b/tests/functional/artifacts/data/state/v3/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v3.json", "dbt_version": "0.21.1", "generated_at": "2022-06-08T05:12:26.978818Z", "invocation_id": "a2594229-14b7-46fe-864f-37cabb5f5f65", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "on_schema_change": "ignore", "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1654665147}}, "sources": {}, "macros": {"macro.test.drop_relation": {"unique_id": "macro.test.drop_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(dbt_labs_materialized_views.drop_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.postgres__list_relations_without_caching": {"unique_id": "macro.test.postgres__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.postgres__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.postgres_get_relations": {"unique_id": "macro.test.postgres_get_relations", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(dbt_labs_materialized_views.postgres_get_relations()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres_get_relations"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.redshift__list_relations_without_caching": {"unique_id": "macro.test.redshift__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "redshift__list_relations_without_caching", "macro_sql": "{% macro redshift__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.redshift__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.redshift__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.load_relation": {"unique_id": "macro.test.load_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(dbt_labs_materialized_views.redshift_load_relation_or_mv(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence = 'p' -- [p]ermanent table. Other values are [u]nlogged table, [t]emporary table\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% for column_name in column_dict %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "materialization_test_default", "macro_sql": "\n\n{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.column_list": {"unique_id": "macro.dbt.column_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list", "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.column_list_for_create_table": {"unique_id": "macro.dbt.column_list_for_create_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list_for_create_table", "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.incremental_upsert": {"unique_id": "macro.dbt.incremental_upsert", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/helpers.sql", "original_file_path": "macros/materializations/incremental/helpers.sql", "name": "incremental_upsert", "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n \n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/incremental.sql", "original_file_path": "macros/materializations/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) %} \n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% do process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n \n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n \n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n \n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n \n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n \n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n \n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n \n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} \n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n \n {% set schema_changed = False %}\n \n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n \n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n \n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n \n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n \n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n \n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} \n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n \n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n \n {% do log(schema_change_message) %}\n \n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n \n {% if on_schema_change != 'ignore' %}\n \n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n \n {% if schema_changes_dict['schema_changed'] %}\n \n {% if on_schema_change == 'fail' %}\n \n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways: \n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n \n {% do exceptions.raise_compiler_error(fail_msg) %}\n \n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n \n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {% endif %}\n \n {% endif %}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.common_get_delete_insert_merge_sql": {"unique_id": "macro.dbt.common_get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "common_get_delete_insert_merge_sql", "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.common_get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/table/table.sql", "original_file_path": "macros/materializations/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/view.sql", "original_file_path": "macros/materializations/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/query.sql", "original_file_path": "macros/etc/query.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/is_incremental.sql", "original_file_path": "macros/etc/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/where_subquery.sql", "original_file_path": "macros/etc/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/where_subquery.sql", "original_file_path": "macros/etc/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n \n {% set sql -%}\n \n alter {{ relation.type }} {{ relation }}\n \n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n \n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n \n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support."}}, "exposures": {}, "selectors": {}, "disabled": [], "parent_map": {"model.test.my_model": []}, "child_map": {"model.test.my_model": []}} diff --git a/tests/functional/artifacts/data/state/v4/manifest.json b/tests/functional/artifacts/data/state/v4/manifest.json new file mode 100644 index 000000000..67b7c244c --- /dev/null +++ b/tests/functional/artifacts/data/state/v4/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v4.json", "dbt_version": "1.0.8", "generated_at": "2022-09-13T08:43:20.641750Z", "invocation_id": "5da6faab-41cb-4180-ab19-8375c0e1f1a5", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "on_schema_change": "ignore", "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1663058601.2387}}, "sources": {}, "macros": {"macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.6944451}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.696331}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.712787}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.714135}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.714985}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7158241}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.717091}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.718103}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7185578}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7194948}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.720533}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.720746}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.72122}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.721492}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7240572}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7252119}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7258098}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.727401}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.729517}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.732625}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7331321}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7335098}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.733889}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7342541}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.735413}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.736206}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.737014}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.738338}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7390552}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7487428}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7491798}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.74975}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7501109}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7503612}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7524228}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.752844}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.753275}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.755879}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.760828}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7705312}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7712681}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.771702}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.77192}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.772458}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7748082}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.775297}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.775959}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.777101}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7941692}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7998898}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.801226}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8020391}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.803186}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.804178}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n \n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.806505}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n \n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n \n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8079581}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n \n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} \n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.809567}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8179982}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8210711}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.821682}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8228061}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.823494}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.825151}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8269792}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) %} \n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns) %}\n \n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.get_delete_insert_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.840609}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n \n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n \n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n \n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8552}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n \n {% set schema_changed = False %}\n \n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n \n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n \n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.858468}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n \n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n \n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} \n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n \n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n \n {% do log(schema_change_message) %}\n \n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8616462}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n \n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n \n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n \n {% if schema_changes_dict['schema_changed'] %}\n \n {% if on_schema_change == 'fail' %}\n \n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways: \n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n \n {% do exceptions.raise_compiler_error(fail_msg) %}\n \n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n \n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {% endif %}\n \n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8635662}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.874618}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8760588}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.876541}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.877064}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n \n {{ sql_header if sql_header is not none }}\n \n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.878151}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.888351}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.889299}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.889876}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.895395}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8964942}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8969111}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.897373}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.898056}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.905844}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9181492}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.920773}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.921389}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.922673}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9230442}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.923341}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.923764}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.924055}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.925326}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9259362}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9291992}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9304068}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.930997}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.93258}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.933256}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.933998}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.93523}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.93588}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9369469}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9375691}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9383721}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.940022}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.943184}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.944719}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.94549}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.950737}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.954061}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.956046}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.956654}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9578362}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9583108}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9587321}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9592059}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9605691}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9609149}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9613152}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.962351}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9670699}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.967857}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9683268}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.968875}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.969353}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9697769}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9703019}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.971044}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9716868}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.973294}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.973898}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.974398}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9758022}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9761899}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9767869}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.977885}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.979867}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.98031}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.980829}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9812522}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.981956}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9832299}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9874718}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9881449}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.98862}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.989036}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.989502}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.990141}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9906712}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.991599}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.992081}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9924932}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9975512}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.997951}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9987469}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9992452}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.000154}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.0007622}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.0023592}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.003041}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n \n {% set sql -%}\n \n alter {{ relation.type }} {{ relation }}\n \n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n \n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n \n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.005121}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.006654}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.007256}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.007999}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.008718}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "metric.yml", "original_file_path": "models/metric.yml", "model": "ref('my_model')", "name": "my_metric", "description": "", "label": "Count records", "type": "count", "sql": "*", "timestamp": "updated_at", "filters": [], "time_grains": ["day"], "dimensions": [], "resource_type": "metric", "meta": {}, "tags": [], "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "created_at": 1663058601.2723079}}, "selectors": {}, "disabled": {}, "parent_map": {"model.test.my_model": [], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["metric.test.my_metric"], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v5/manifest.json b/tests/functional/artifacts/data/state/v5/manifest.json new file mode 100644 index 000000000..d6662b2a6 --- /dev/null +++ b/tests/functional/artifacts/data/state/v5/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v5.json", "dbt_version": "1.1.2", "generated_at": "2022-09-13T08:43:05.173401Z", "invocation_id": "46690f0c-35b6-44f7-95bc-3a91cbf87484", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1663058585.790391}}, "sources": {}, "macros": {"macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2411761}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2429922}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.258873}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.260246}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.261101}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2619379}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.263221}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.264239}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.264697}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.265624}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2666838}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.266898}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2673979}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.267664}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.270232}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.271394}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.271986}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.273597}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.275718}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.278846}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2793732}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2797408}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.280107}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.280468}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.28161}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.28241}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.283215}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.284517}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.285232}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2955132}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.295966}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.296581}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2969642}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.297218}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.29933}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2997508}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3001878}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3028228}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3070579}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3179069}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.318669}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3191}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3193212}, "macro.dbt.get_true_sql": {"unique_id": "macro.dbt.get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.319695}, "macro.dbt.default__get_true_sql": {"unique_id": "macro.dbt.default__get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.319982}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.320519}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.322867}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.323358}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.324033}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.325182}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.34037}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.346029}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3473558}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.348177}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.349312}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.350326}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.352607}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.35407}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3557}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.370045}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.374055}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.401227}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last }}\n {% endfor %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4033751}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4041102}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.405853}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4078321}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,\n schema=schema,\n database=database) %}\n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.get_delete_insert_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.421077}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.434217}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.43755}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.440778}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.442719}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,\n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.452991}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.454284}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.454771}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.455303}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.456414}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,\n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4662242}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.467168}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.46778}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4711108}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4722078}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.472634}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.473103}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.473792}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4816241}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.493353}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.495795}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4964218}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.497696}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.498073}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.498365}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.498788}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.499098}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.500374}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.500983}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5042381}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5054028}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.505999}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.507547}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.508227}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.508971}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5101619}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.510827}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.511889}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5128162}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5136151}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.515251}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.518385}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.519926}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5207071}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.525802}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.52915}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.53116}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5317729}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.532931}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.533409}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.533822}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5342898}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5356438}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.536011}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.536417}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5374599}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.542093}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5428941}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.543366}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.543921}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.544389}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.544798}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5453591}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.546091}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.546772}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.548408}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.54901}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.549522}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.550913}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.551292}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5518851}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.552993}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.554944}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5553741}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5559008}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.556353}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.557066}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.55835}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5624719}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.563156}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.563638}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.564032}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.564497}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.565144}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.565687}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.566637}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5671191}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.567532}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.572185}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.572603}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5734131}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.573897}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5747972}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5754082}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.577007}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.577695}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.579798}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.581336}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.581941}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.582701}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.583425}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "metric.yml", "original_file_path": "models/metric.yml", "model": "ref('my_model')", "name": "my_metric", "description": "", "label": "Count records", "type": "count", "sql": "*", "timestamp": "updated_at", "filters": [], "time_grains": ["day"], "dimensions": [], "resource_type": "metric", "meta": {}, "tags": [], "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "created_at": 1663058585.822956}}, "selectors": {}, "disabled": {}, "parent_map": {"model.test.my_model": [], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["metric.test.my_metric"], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v6/manifest.json b/tests/functional/artifacts/data/state/v6/manifest.json new file mode 100644 index 000000000..e98ee672b --- /dev/null +++ b/tests/functional/artifacts/data/state/v6/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v6.json", "dbt_version": "1.2.1", "generated_at": "2022-09-13T08:42:50.298210Z", "invocation_id": "aa834731-46c3-49aa-8ec8-956dae621b58", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1663058263.145605}}, "sources": {}, "macros": {"macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.441694}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.443508}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4609761}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.462327}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4631748}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.46401}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.465282}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.46641}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4668732}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4678211}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4688902}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.46909}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.469568}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.469843}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.472267}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.472806}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.473689}, "macro.dbt_postgres.postgres__make_backup_relation": {"unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4744241}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.475544}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4761422}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.477773}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.478305}, "macro.dbt_postgres.postgres__copy_grants": {"unique_id": "macro.dbt_postgres.postgres__copy_grants", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.478604}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.480701}, "macro.dbt_postgres.postgres__dateadd": {"unique_id": "macro.dbt_postgres.postgres__dateadd", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.481434}, "macro.dbt_postgres.postgres__listagg": {"unique_id": "macro.dbt_postgres.postgres__listagg", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.483254}, "macro.dbt_postgres.postgres__datediff": {"unique_id": "macro.dbt_postgres.postgres__datediff", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.492169}, "macro.dbt_postgres.postgres__any_value": {"unique_id": "macro.dbt_postgres.postgres__any_value", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4927092}, "macro.dbt_postgres.postgres__last_day": {"unique_id": "macro.dbt_postgres.postgres__last_day", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.494153}, "macro.dbt_postgres.postgres__split_part": {"unique_id": "macro.dbt_postgres.postgres__split_part", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.495394}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.498407}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4989262}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.499295}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4996538}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5000162}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5011232}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.50192}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.502719}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.50401}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.504728}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.51448}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.51492}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5155}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.515872}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.516114}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.518018}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5184388}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5188859}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_sql'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.522562}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.526156}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.53722}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.537965}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.538402}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.538624}, "macro.dbt.get_true_sql": {"unique_id": "macro.dbt.get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.538997}, "macro.dbt.default__get_true_sql": {"unique_id": "macro.dbt.default__get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.539289}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.539823}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.542159}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5426402}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.543298}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5444548}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.561558}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5671492}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5684512}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.569262}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.570368}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5713542}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5735822}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5750248}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.576639}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.591182}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.595134}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.622782}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last }}\n {% endfor %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.624852}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.625576}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.627269}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6292028}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, temp_relation, unique_key, dest_columns) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.get_delete_insert_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6413598}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.654586}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6579268}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6611688}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.663094}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.670167}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6714208}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.671903}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.67243}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.673532}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.680615}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6815622}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.682152}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.686346}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.687447}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6878788}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.688359}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6890602}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.698056}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7117178}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7141619}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.714784}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.716077}, "macro.dbt.get_csv_sql": {"unique_id": "macro.dbt.get_csv_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.716581}, "macro.dbt.default__get_csv_sql": {"unique_id": "macro.dbt.default__get_csv_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.716925}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.717283}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.717576}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.717988}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.718283}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.719548}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.720028}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.723425}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.724572}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.725169}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.72671}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.727396}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.728158}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.729355}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.73002}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7310588}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.731988}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.732775}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.734423}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.737539}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.739098}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.739882}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.744986}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7483132}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.750348}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.750963}, "macro.dbt.except": {"unique_id": "macro.dbt.except", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7517078}, "macro.dbt.default__except": {"unique_id": "macro.dbt.default__except", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.751895}, "macro.dbt.replace": {"unique_id": "macro.dbt.replace", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.752911}, "macro.dbt.default__replace": {"unique_id": "macro.dbt.default__replace", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.753338}, "macro.dbt.concat": {"unique_id": "macro.dbt.concat", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7541351}, "macro.dbt.default__concat": {"unique_id": "macro.dbt.default__concat", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.754451}, "macro.dbt.length": {"unique_id": "macro.dbt.length", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.75524}, "macro.dbt.default__length": {"unique_id": "macro.dbt.default__length", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.755511}, "macro.dbt.dateadd": {"unique_id": "macro.dbt.dateadd", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7565598}, "macro.dbt.default__dateadd": {"unique_id": "macro.dbt.default__dateadd", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.756995}, "macro.dbt.intersect": {"unique_id": "macro.dbt.intersect", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7577422}, "macro.dbt.default__intersect": {"unique_id": "macro.dbt.default__intersect", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7579389}, "macro.dbt.escape_single_quotes": {"unique_id": "macro.dbt.escape_single_quotes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7587788}, "macro.dbt.default__escape_single_quotes": {"unique_id": "macro.dbt.default__escape_single_quotes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.75913}, "macro.dbt.right": {"unique_id": "macro.dbt.right", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.760025}, "macro.dbt.default__right": {"unique_id": "macro.dbt.default__right", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.760372}, "macro.dbt.listagg": {"unique_id": "macro.dbt.listagg", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.762194}, "macro.dbt.default__listagg": {"unique_id": "macro.dbt.default__listagg", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7634282}, "macro.dbt.datediff": {"unique_id": "macro.dbt.datediff", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.764456}, "macro.dbt.default__datediff": {"unique_id": "macro.dbt.default__datediff", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.764895}, "macro.dbt.safe_cast": {"unique_id": "macro.dbt.safe_cast", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.76579}, "macro.dbt.default__safe_cast": {"unique_id": "macro.dbt.default__safe_cast", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7661529}, "macro.dbt.hash": {"unique_id": "macro.dbt.hash", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.766952}, "macro.dbt.default__hash": {"unique_id": "macro.dbt.default__hash", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.767371}, "macro.dbt.cast_bool_to_text": {"unique_id": "macro.dbt.cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7681432}, "macro.dbt.default__cast_bool_to_text": {"unique_id": "macro.dbt.default__cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.768564}, "macro.dbt.any_value": {"unique_id": "macro.dbt.any_value", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.769363}, "macro.dbt.default__any_value": {"unique_id": "macro.dbt.default__any_value", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.769631}, "macro.dbt.position": {"unique_id": "macro.dbt.position", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.770546}, "macro.dbt.default__position": {"unique_id": "macro.dbt.default__position", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7708979}, "macro.dbt.string_literal": {"unique_id": "macro.dbt.string_literal", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.77168}, "macro.dbt.default__string_literal": {"unique_id": "macro.dbt.default__string_literal", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.771949}, "macro.dbt.type_string": {"unique_id": "macro.dbt.type_string", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.775103}, "macro.dbt.default__type_string": {"unique_id": "macro.dbt.default__type_string", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7755132}, "macro.dbt.type_timestamp": {"unique_id": "macro.dbt.type_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7759519}, "macro.dbt.default__type_timestamp": {"unique_id": "macro.dbt.default__type_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.776351}, "macro.dbt.type_float": {"unique_id": "macro.dbt.type_float", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7769148}, "macro.dbt.default__type_float": {"unique_id": "macro.dbt.default__type_float", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.777309}, "macro.dbt.type_numeric": {"unique_id": "macro.dbt.type_numeric", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7777238}, "macro.dbt.default__type_numeric": {"unique_id": "macro.dbt.default__type_numeric", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.778186}, "macro.dbt.type_bigint": {"unique_id": "macro.dbt.type_bigint", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7786}, "macro.dbt.default__type_bigint": {"unique_id": "macro.dbt.default__type_bigint", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.778995}, "macro.dbt.type_int": {"unique_id": "macro.dbt.type_int", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7794151}, "macro.dbt.default__type_int": {"unique_id": "macro.dbt.default__type_int", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.779795}, "macro.dbt.bool_or": {"unique_id": "macro.dbt.bool_or", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.780659}, "macro.dbt.default__bool_or": {"unique_id": "macro.dbt.default__bool_or", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.780927}, "macro.dbt.last_day": {"unique_id": "macro.dbt.last_day", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.782002}, "macro.dbt.default_last_day": {"unique_id": "macro.dbt.default_last_day", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.782704}, "macro.dbt.default__last_day": {"unique_id": "macro.dbt.default__last_day", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7830899}, "macro.dbt.split_part": {"unique_id": "macro.dbt.split_part", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7846441}, "macro.dbt.default__split_part": {"unique_id": "macro.dbt.default__split_part", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7850811}, "macro.dbt._split_part_negative": {"unique_id": "macro.dbt._split_part_negative", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "_split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.785673}, "macro.dbt.date_trunc": {"unique_id": "macro.dbt.date_trunc", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7865448}, "macro.dbt.default__date_trunc": {"unique_id": "macro.dbt.default__date_trunc", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.786887}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.788033}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7884989}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.788918}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7895281}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.79088}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.791239}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.791649}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.792712}, "macro.dbt.make_intermediate_relation": {"unique_id": "macro.dbt.make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.80177}, "macro.dbt.default__make_intermediate_relation": {"unique_id": "macro.dbt.default__make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.802206}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.802784}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8035462}, "macro.dbt.make_backup_relation": {"unique_id": "macro.dbt.make_backup_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.804203}, "macro.dbt.default__make_backup_relation": {"unique_id": "macro.dbt.default__make_backup_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.805045}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.805522}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8060842}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.806554}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.806978}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8075058}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.808251}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.808909}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.810538}, "macro.dbt.load_cached_relation": {"unique_id": "macro.dbt.load_cached_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8111548}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.811518}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.812018}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.813409}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.813792}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8143969}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.815532}, "macro.dbt.copy_grants": {"unique_id": "macro.dbt.copy_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.820182}, "macro.dbt.default__copy_grants": {"unique_id": "macro.dbt.default__copy_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.820483}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.820917}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8212}, "macro.dbt.should_revoke": {"unique_id": "macro.dbt.should_revoke", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8221052}, "macro.dbt.get_show_grant_sql": {"unique_id": "macro.dbt.get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8225951}, "macro.dbt.default__get_show_grant_sql": {"unique_id": "macro.dbt.default__get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.822855}, "macro.dbt.get_grant_sql": {"unique_id": "macro.dbt.get_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.823464}, "macro.dbt.default__get_grant_sql": {"unique_id": "macro.dbt.default__get_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.823967}, "macro.dbt.get_revoke_sql": {"unique_id": "macro.dbt.get_revoke_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8245819}, "macro.dbt.default__get_revoke_sql": {"unique_id": "macro.dbt.default__get_revoke_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.825077}, "macro.dbt.get_dcl_statement_list": {"unique_id": "macro.dbt.get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.825684}, "macro.dbt.default__get_dcl_statement_list": {"unique_id": "macro.dbt.default__get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.827622}, "macro.dbt.call_dcl_statements": {"unique_id": "macro.dbt.call_dcl_statements", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.828118}, "macro.dbt.default__call_dcl_statements": {"unique_id": "macro.dbt.default__call_dcl_statements", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.828738}, "macro.dbt.apply_grants": {"unique_id": "macro.dbt.apply_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8293319}, "macro.dbt.default__apply_grants": {"unique_id": "macro.dbt.default__apply_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8324008}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.834379}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.834809}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.835356}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.835799}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8365178}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.837826}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.841973}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.842654}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.843149}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8435562}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8440409}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8447008}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.845243}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.846046}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.846539}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.846957}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.851595}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.852285}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.853091}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.853559}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.854439}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.85503}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.856618}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8572822}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8593512}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.87712}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.877723}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.878474}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.879189}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "metric.yml", "original_file_path": "models/metric.yml", "name": "my_metric", "description": "", "label": "Count records", "type": "count", "sql": "*", "timestamp": "updated_at", "filters": [], "time_grains": ["day"], "dimensions": [], "model": "ref('my_model')", "model_unique_id": null, "resource_type": "metric", "meta": {}, "tags": [], "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "metrics": [], "created_at": 1663058517.2551522}}, "selectors": {}, "disabled": {}, "parent_map": {"model.test.my_model": [], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["metric.test.my_metric"], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v7/manifest.json b/tests/functional/artifacts/data/state/v7/manifest.json new file mode 100644 index 000000000..e8529bd3c --- /dev/null +++ b/tests/functional/artifacts/data/state/v7/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", "dbt_version": "1.3.2", "generated_at": "2023-02-13T21:34:36.870255Z", "invocation_id": "96c0aa43-0ccd-4420-a50c-05c0f22a0df1", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "raw_code": "select 1 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "2b9123e04ab8bb798f7c565afdc3ee0e56fcd66b4bfbdb435b4891c878d947c5"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.566336}, "snapshot.test.snapshot_seed": {"resource_type": "snapshot", "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763240740000063267_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "unique_id": "snapshot.test.snapshot_seed", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "name": "snapshot_seed", "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "00c13c306831070996970605fbc4c901aa456e1ed1c028725a932e4e6a4ffb0a"}, "tags": [], "refs": [["my_seed"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763240740000063267_test_previous_version_state"}, "created_at": 1676324075.423856}, "analysis.test.a": {"resource_type": "analysis", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "analysis", "a"], "unique_id": "analysis.test.a", "raw_code": "select 4 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "name": "a", "alias": "a", "checksum": {"name": "sha256", "checksum": "bd1ee600e4e80d03f488fee52a66e8d51b5be2b98acc20df1cf8be4670d86ae5"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.453177}, "test.test.just_my": {"resource_type": "test", "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "just_my"], "unique_id": "test.test.just_my", "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "name": "just_my", "alias": "just_my", "checksum": {"name": "sha256", "checksum": "f30b7a814e0e3761d1a8042aa40d658d6c33affb28cd92782b0f56559c414fd8"}, "tags": ["data_test_tag"], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1676324075.520421}, "seed.test.my_seed": {"resource_type": "seed", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "my_seed"], "unique_id": "seed.test.my_seed", "raw_code": "", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "name": "my_seed", "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "22697c9b76d73a6c7561554ddb2ce101428ea2737ba8dc500d52ebcfdcfcfc13"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.542836}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "resource_type": "test", "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "not_null_my_model_id"], "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "name": "not_null_my_model_id", "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.575407, "column_name": "id", "file_key_name": "models.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "resource_type": "test", "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "check_nothing_my_model_"], "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "name": "check_nothing_my_model_", "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.577614, "column_name": null, "file_key_name": "models.my_model"}}, "sources": {"source.test.my_source.my_table": {"fqn": ["test", "my_source", "my_table"], "database": "dbt", "schema": "my_source", "unique_id": "source.test.my_source.my_table", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "name": "my_table", "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "resource_type": "source", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1676324075.624893}}, "macros": {"macro.test.test_check_nothing": {"unique_id": "macro.test.test_check_nothing", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "name": "test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.712246, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"unique_id": "macro.test.test_disabled_check_nothing", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "name": "test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.713166, "supported_languages": null}, "macro.test.do_nothing": {"unique_id": "macro.test.do_nothing", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "name": "do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7140381, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.714999, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.715521, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7158241, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.716109, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7164018, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.719163, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7208161, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.733047, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.734891, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.735866, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.736712, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.738183, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.739969, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.740651, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.741567, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.742538, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.744768, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.745294, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.746094, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.746795, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.747864, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7484329, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7500181, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.750537, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"unique_id": "macro.dbt_postgres.postgres__copy_grants", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.750845, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "name": "postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.752257, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.754163, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"unique_id": "macro.dbt_postgres.postgres__dateadd", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.755116, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"unique_id": "macro.dbt_postgres.postgres__listagg", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.757173, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"unique_id": "macro.dbt_postgres.postgres__datediff", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.765253, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"unique_id": "macro.dbt_postgres.postgres__any_value", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.766171, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"unique_id": "macro.dbt_postgres.postgres__last_day", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.767721, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"unique_id": "macro.dbt_postgres.postgres__split_part", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7690408, "supported_languages": null}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7719731, "supported_languages": null}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.772469, "supported_languages": null}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7729542, "supported_languages": null}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.773334, "supported_languages": null}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.773783, "supported_languages": null}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7750452, "supported_languages": null}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.775828, "supported_languages": null}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.776683, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.778024, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.778701, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.786231, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.78667, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.787231, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.788982, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.789396, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.789981, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.793568, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7968981, "supported_languages": null}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.80455, "supported_languages": null}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.805295, "supported_languages": null}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.805722, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.805967, "supported_languages": null}, "macro.dbt.get_true_sql": {"unique_id": "macro.dbt.get_true_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8063412, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"unique_id": "macro.dbt.default__get_true_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.806646, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.807165, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.80939, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.809878, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.810522, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.811589, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.826415, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.831668, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8331032, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8338718, "supported_languages": null}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8350341, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.835979, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8394618, "supported_languages": null}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8409832, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.842655, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"unique_id": "macro.dbt.get_merge_update_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8432481, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"unique_id": "macro.dbt.default__get_merge_update_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.845012, "supported_languages": null}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8534558, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.857812, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.85844, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last }}\n {% endfor %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8602839, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.860947, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.862558, "supported_languages": null}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.864465, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"unique_id": "macro.dbt.get_incremental_append_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.866665, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"unique_id": "macro.dbt.default__get_incremental_append_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.867279, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"unique_id": "macro.dbt.get_incremental_delete_insert_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.867753, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.868414, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"unique_id": "macro.dbt.get_incremental_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.868883, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"unique_id": "macro.dbt.default__get_incremental_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8695412, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.870015, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"predicates\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8706748, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"unique_id": "macro.dbt.get_incremental_default_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8712032, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"unique_id": "macro.dbt.default__get_incremental_default_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.871608, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"unique_id": "macro.dbt.get_insert_into_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.872406, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8839822, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.894743, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8977559, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.900897, "supported_languages": null}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9030159, "supported_languages": null}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.909476, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.911144, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.911638, "supported_languages": null}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.912726, "supported_languages": null}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9137652, "supported_languages": null}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.919893, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.921067, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.921632, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.925656, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.926919, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.927346, "supported_languages": null}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9278772, "supported_languages": null}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.928556, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9364128, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9457762, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.948132, "supported_languages": null}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.948717, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9499142, "supported_languages": null}, "macro.dbt.get_csv_sql": {"unique_id": "macro.dbt.get_csv_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9503942, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"unique_id": "macro.dbt.default__get_csv_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.950737, "supported_languages": null}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9510899, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.951392, "supported_languages": null}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9517949, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9521081, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9532878, "supported_languages": null}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9537542, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.957227, "supported_languages": null}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.958654, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.959223, "supported_languages": null}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9607859, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.961487, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.962187, "supported_languages": null}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.963528, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9641569, "supported_languages": null}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9653509, "supported_languages": null}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.966414, "supported_languages": null}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.967387, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.969207, "supported_languages": null}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.973081, "supported_languages": null}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.974716, "supported_languages": null}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9754639, "supported_languages": null}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.979558, "supported_languages": null}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.982544, "supported_languages": null}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.984403, "supported_languages": null}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9849951, "supported_languages": null}, "macro.dbt.except": {"unique_id": "macro.dbt.except", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.98597, "supported_languages": null}, "macro.dbt.default__except": {"unique_id": "macro.dbt.default__except", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.986175, "supported_languages": null}, "macro.dbt.replace": {"unique_id": "macro.dbt.replace", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9873059, "supported_languages": null}, "macro.dbt.default__replace": {"unique_id": "macro.dbt.default__replace", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.987733, "supported_languages": null}, "macro.dbt.concat": {"unique_id": "macro.dbt.concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.98864, "supported_languages": null}, "macro.dbt.default__concat": {"unique_id": "macro.dbt.default__concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.988965, "supported_languages": null}, "macro.dbt.length": {"unique_id": "macro.dbt.length", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.990331, "supported_languages": null}, "macro.dbt.default__length": {"unique_id": "macro.dbt.default__length", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9906292, "supported_languages": null}, "macro.dbt.dateadd": {"unique_id": "macro.dbt.dateadd", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.991911, "supported_languages": null}, "macro.dbt.default__dateadd": {"unique_id": "macro.dbt.default__dateadd", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9923341, "supported_languages": null}, "macro.dbt.intersect": {"unique_id": "macro.dbt.intersect", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.993244, "supported_languages": null}, "macro.dbt.default__intersect": {"unique_id": "macro.dbt.default__intersect", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.993451, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"unique_id": "macro.dbt.escape_single_quotes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.994604, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"unique_id": "macro.dbt.default__escape_single_quotes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.995014, "supported_languages": null}, "macro.dbt.right": {"unique_id": "macro.dbt.right", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9960818, "supported_languages": null}, "macro.dbt.default__right": {"unique_id": "macro.dbt.default__right", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9965801, "supported_languages": null}, "macro.dbt.listagg": {"unique_id": "macro.dbt.listagg", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9982479, "supported_languages": null}, "macro.dbt.default__listagg": {"unique_id": "macro.dbt.default__listagg", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.999271, "supported_languages": null}, "macro.dbt.datediff": {"unique_id": "macro.dbt.datediff", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.000368, "supported_languages": null}, "macro.dbt.default__datediff": {"unique_id": "macro.dbt.default__datediff", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0007942, "supported_languages": null}, "macro.dbt.safe_cast": {"unique_id": "macro.dbt.safe_cast", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0018618, "supported_languages": null}, "macro.dbt.default__safe_cast": {"unique_id": "macro.dbt.default__safe_cast", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.002232, "supported_languages": null}, "macro.dbt.hash": {"unique_id": "macro.dbt.hash", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.003175, "supported_languages": null}, "macro.dbt.default__hash": {"unique_id": "macro.dbt.default__hash", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0035892, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"unique_id": "macro.dbt.cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.004607, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"unique_id": "macro.dbt.default__cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.005036, "supported_languages": null}, "macro.dbt.any_value": {"unique_id": "macro.dbt.any_value", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.00601, "supported_languages": null}, "macro.dbt.default__any_value": {"unique_id": "macro.dbt.default__any_value", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0063071, "supported_languages": null}, "macro.dbt.position": {"unique_id": "macro.dbt.position", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.007398, "supported_languages": null}, "macro.dbt.default__position": {"unique_id": "macro.dbt.default__position", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.007754, "supported_languages": null}, "macro.dbt.string_literal": {"unique_id": "macro.dbt.string_literal", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.008699, "supported_languages": null}, "macro.dbt.default__string_literal": {"unique_id": "macro.dbt.default__string_literal", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.008971, "supported_languages": null}, "macro.dbt.type_string": {"unique_id": "macro.dbt.type_string", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.011225, "supported_languages": null}, "macro.dbt.default__type_string": {"unique_id": "macro.dbt.default__type_string", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.011629, "supported_languages": null}, "macro.dbt.type_timestamp": {"unique_id": "macro.dbt.type_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.012175, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"unique_id": "macro.dbt.default__type_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0125592, "supported_languages": null}, "macro.dbt.type_float": {"unique_id": "macro.dbt.type_float", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.012963, "supported_languages": null}, "macro.dbt.default__type_float": {"unique_id": "macro.dbt.default__type_float", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.013345, "supported_languages": null}, "macro.dbt.type_numeric": {"unique_id": "macro.dbt.type_numeric", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.013751, "supported_languages": null}, "macro.dbt.default__type_numeric": {"unique_id": "macro.dbt.default__type_numeric", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0141928, "supported_languages": null}, "macro.dbt.type_bigint": {"unique_id": "macro.dbt.type_bigint", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.014601, "supported_languages": null}, "macro.dbt.default__type_bigint": {"unique_id": "macro.dbt.default__type_bigint", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.014984, "supported_languages": null}, "macro.dbt.type_int": {"unique_id": "macro.dbt.type_int", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.015394, "supported_languages": null}, "macro.dbt.default__type_int": {"unique_id": "macro.dbt.default__type_int", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.015761, "supported_languages": null}, "macro.dbt.type_boolean": {"unique_id": "macro.dbt.type_boolean", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.016257, "supported_languages": null}, "macro.dbt.default__type_boolean": {"unique_id": "macro.dbt.default__type_boolean", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.01665, "supported_languages": null}, "macro.dbt.array_concat": {"unique_id": "macro.dbt.array_concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "name": "array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.017704, "supported_languages": null}, "macro.dbt.default__array_concat": {"unique_id": "macro.dbt.default__array_concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "name": "default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0180538, "supported_languages": null}, "macro.dbt.bool_or": {"unique_id": "macro.dbt.bool_or", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.01898, "supported_languages": null}, "macro.dbt.default__bool_or": {"unique_id": "macro.dbt.default__bool_or", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.019255, "supported_languages": null}, "macro.dbt.last_day": {"unique_id": "macro.dbt.last_day", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.020355, "supported_languages": null}, "macro.dbt.default_last_day": {"unique_id": "macro.dbt.default_last_day", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0210161, "supported_languages": null}, "macro.dbt.default__last_day": {"unique_id": "macro.dbt.default__last_day", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.021465, "supported_languages": null}, "macro.dbt.split_part": {"unique_id": "macro.dbt.split_part", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.023027, "supported_languages": null}, "macro.dbt.default__split_part": {"unique_id": "macro.dbt.default__split_part", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.023608, "supported_languages": null}, "macro.dbt._split_part_negative": {"unique_id": "macro.dbt._split_part_negative", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "_split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.024339, "supported_languages": null}, "macro.dbt.date_trunc": {"unique_id": "macro.dbt.date_trunc", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.02547, "supported_languages": null}, "macro.dbt.default__date_trunc": {"unique_id": "macro.dbt.default__date_trunc", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0258088, "supported_languages": null}, "macro.dbt.array_construct": {"unique_id": "macro.dbt.array_construct", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "name": "array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0271108, "supported_languages": null}, "macro.dbt.default__array_construct": {"unique_id": "macro.dbt.default__array_construct", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "name": "default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.027744, "supported_languages": null}, "macro.dbt.array_append": {"unique_id": "macro.dbt.array_append", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "name": "array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.028975, "supported_languages": null}, "macro.dbt.default__array_append": {"unique_id": "macro.dbt.default__array_append", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "name": "default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0293288, "supported_languages": null}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.030483, "supported_languages": null}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0309591, "supported_languages": null}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.031368, "supported_languages": null}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0318341, "supported_languages": null}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.033173, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.033555, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0339258, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.034244, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"unique_id": "macro.dbt.current_timestamp_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.034715, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"unique_id": "macro.dbt.default__current_timestamp_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0349262, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.035347, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.035771, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0371048, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.037695, "supported_languages": null}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.038136, "supported_languages": null}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.039148, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"unique_id": "macro.dbt.make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0456681, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"unique_id": "macro.dbt.default__make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.046104, "supported_languages": null}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0466611, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.047403, "supported_languages": null}, "macro.dbt.make_backup_relation": {"unique_id": "macro.dbt.make_backup_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0480168, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"unique_id": "macro.dbt.default__make_backup_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.048809, "supported_languages": null}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.049268, "supported_languages": null}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.049806, "supported_languages": null}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.050257, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.050667, "supported_languages": null}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.051177, "supported_languages": null}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.051893, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.052501, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.053866, "supported_languages": null}, "macro.dbt.load_cached_relation": {"unique_id": "macro.dbt.load_cached_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.054573, "supported_languages": null}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0549362, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.055425, "supported_languages": null}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0570502, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0581172, "supported_languages": null}, "macro.dbt.copy_grants": {"unique_id": "macro.dbt.copy_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.06205, "supported_languages": null}, "macro.dbt.default__copy_grants": {"unique_id": "macro.dbt.default__copy_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.062446, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.062894, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.063191, "supported_languages": null}, "macro.dbt.should_revoke": {"unique_id": "macro.dbt.should_revoke", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.064117, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"unique_id": "macro.dbt.get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0646179, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"unique_id": "macro.dbt.default__get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0649018, "supported_languages": null}, "macro.dbt.get_grant_sql": {"unique_id": "macro.dbt.get_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.065465, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"unique_id": "macro.dbt.default__get_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0659552, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"unique_id": "macro.dbt.get_revoke_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.066529, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"unique_id": "macro.dbt.default__get_revoke_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0670002, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"unique_id": "macro.dbt.get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0675662, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"unique_id": "macro.dbt.default__get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.069387, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"unique_id": "macro.dbt.call_dcl_statements", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.070018, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"unique_id": "macro.dbt.default__call_dcl_statements", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.070652, "supported_languages": null}, "macro.dbt.apply_grants": {"unique_id": "macro.dbt.apply_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.071306, "supported_languages": null}, "macro.dbt.default__apply_grants": {"unique_id": "macro.dbt.default__apply_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.074133, "supported_languages": null}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.07601, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.076446, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0769558, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.077386, "supported_languages": null}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.078047, "supported_languages": null}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0792341, "supported_languages": null}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0824351, "supported_languages": null}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0830872, "supported_languages": null}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.083549, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.083961, "supported_languages": null}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0844128, "supported_languages": null}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.085026, "supported_languages": null}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0855522, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0863092, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.086777, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.087187, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.09098, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0918489, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.092635, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0931032, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.09394, "supported_languages": null}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.094506, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0959759, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.096619, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.098658, "supported_languages": null}, "macro.dbt.build_ref_function": {"unique_id": "macro.dbt.build_ref_function", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join(\".\"): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.10208, "supported_languages": null}, "macro.dbt.build_source_function": {"unique_id": "macro.dbt.build_source_function", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join(\".\"): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.103234, "supported_languages": null}, "macro.dbt.build_config_dict": {"unique_id": "macro.dbt.build_config_dict", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {%- for key in model.config.config_keys_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == 'language' -%}\n {%- set value = 'python' -%}\n {%- endif -%}\n {%- set value = model.config[key] -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.104405, "supported_languages": null}, "macro.dbt.py_script_postfix": {"unique_id": "macro.dbt.py_script_postfix", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = '{{ this.database }}'\n schema = '{{ this.schema }}'\n identifier = '{{ this.identifier }}'\n def __repr__(self):\n return '{{ this }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.105411, "supported_languages": null}, "macro.dbt.py_script_comment": {"unique_id": "macro.dbt.py_script_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.105611, "supported_languages": null}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.107241, "supported_languages": null}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.10783, "supported_languages": null}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.108544, "supported_languages": null}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.109239, "supported_languages": null}}, "docs": {"test.somedoc": {"unique_id": "test.somedoc", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "name": "somedoc", "block_contents": "Testing, testing"}, "dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"fqn": ["test", "simple_exposure"], "unique_id": "exposure.test.simple_exposure", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "simple_exposure", "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "resource_type": "exposure", "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [["my_model"]], "sources": [["my_source", "my_table"]], "created_at": 1676324075.609121}}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "my_metric", "description": "", "label": "Count records", "calculation_method": "count", "timestamp": "updated_at", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "window": null, "model": "ref('my_model')", "model_unique_id": null, "resource_type": "metric", "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "metrics": [], "created_at": 1676324075.618992}}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "disabled_model"], "unique_id": "model.test.disabled_model", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "name": "disabled_model", "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "34f7b8e60d9e7933469c48d6c92b0a53918d0ba626a9ce2c30ab2f1532145827"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.4071748, "config_call_dict": {"enabled": false}}], "snapshot.test.disabled_snapshot_seed": [{"resource_type": "snapshot", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763240740000063267_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "unique_id": "snapshot.test.disabled_snapshot_seed", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "name": "disabled_snapshot_seed", "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "52b08465e16dcbc364162dfbdb34cf25e04295bc13d63ab0b420f60d15234c76"}, "tags": [], "refs": [["my_seed"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763240740000063267_test_previous_version_state", "enabled": false}, "created_at": 1676324075.4334059, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763240740000063267_test_previous_version_state", "enabled": false}}], "analysis.test.disabled_al": [{"resource_type": "analysis", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "analysis", "disabled_al"], "unique_id": "analysis.test.disabled_al", "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "name": "disabled_al", "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "76b8579816eac97721616fd429dcd1a93c311c6358830a65d40ebe5661572610"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.4483929, "config_call_dict": {"enabled": false}}], "test.test.disabled_just_my": [{"resource_type": "test", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "disabled_just_my"], "unique_id": "test.test.disabled_just_my", "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "name": "disabled_just_my", "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "0b5827d08d1e3c97e8fb865bea00031b2e90ecef7884a42429cc48d0f48b8c20"}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.5147672, "config_call_dict": {"enabled": false}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "resource_type": "test", "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "disabled_check_nothing_my_model_"], "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "name": "disabled_check_nothing_my_model_", "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.587028, "config_call_dict": {"enabled": false}, "column_name": null, "file_key_name": "models.my_model"}], "exposure.test.disabled_exposure": [{"fqn": ["test", "disabled_exposure"], "unique_id": "exposure.test.disabled_exposure", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "disabled_exposure", "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "resource_type": "exposure", "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "sources": [], "created_at": 1676324075.612152}], "metric.test.disabled_metric": [{"fqn": ["test", "disabled_metric"], "unique_id": "metric.test.disabled_metric", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "disabled_metric", "description": "", "label": "Count records", "calculation_method": "count", "timestamp": "updated_at", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "window": null, "model": "ref('my_model')", "model_unique_id": null, "resource_type": "metric", "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "metrics": [], "created_at": 1676324075.622605}], "seed.test.disabled_seed": [{"resource_type": "seed", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "disabled_seed"], "unique_id": "seed.test.disabled_seed", "raw_code": "", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "name": "disabled_seed", "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "c6c08a913b5a382014ef0ba248d97b12fc801beb369fdbd24aff1a3912ee3773"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.594186, "config_call_dict": {}}], "source.test.my_source.disabled_table": [{"fqn": ["test", "my_source", "disabled_table"], "database": "dbt", "schema": "my_source", "unique_id": "source.test.my_source.disabled_table", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "name": "disabled_table", "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "resource_type": "source", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1676324075.625102}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "metric.test.my_metric", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v8/manifest.json b/tests/functional/artifacts/data/state/v8/manifest.json new file mode 100644 index 000000000..df5c8738e --- /dev/null +++ b/tests/functional/artifacts/data/state/v8/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v8.json", "dbt_version": "1.5.0a1", "generated_at": "2023-02-13T21:04:43.788883Z", "invocation_id": "c7896040-31e1-487d-8438-19d703edb137", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "2b9123e04ab8bb798f7c565afdc3ee0e56fcd66b4bfbdb435b4891c878d947c5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.4291918, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "00c13c306831070996970605fbc4c901aa456e1ed1c028725a932e4e6a4ffb0a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763222812618906995_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763222812618906995_test_previous_version_state"}, "created_at": 1676322282.28191, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [["my_seed"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null}, "analysis.test.a": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "bd1ee600e4e80d03f488fee52a66e8d51b5be2b98acc20df1cf8be4670d86ae5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.338664, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}, "test.test.just_my": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "f30b7a814e0e3761d1a8042aa40d658d6c33affb28cd92782b0f56559c414fd8"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1676322282.365304, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null}, "seed.test.my_seed": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "22697c9b76d73a6c7561554ddb2ce101428ea2737ba8dc500d52ebcfdcfcfc13"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.395373, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-115/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.439473, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "column_name": "id", "file_key_name": "models.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.4446359, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "column_name": null, "file_key_name": "models.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1676322282.498101}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4094772, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.410033, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.41051, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.411176, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.411718, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.412009, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.41232, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.412619, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4152992, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4168088, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.428651, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.430589, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4315221, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.432323, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.433569, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.434568, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4350138, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.435891, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.436857, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4389682, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.439469, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4407659, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.441967, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.443386, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4441102, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.446302, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4471622, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.447847, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.449656, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.452299, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.453088, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.455125, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.462395, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.463126, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.464517, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4655108, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.468192, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.468693, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.469058, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4694211, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.469785, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.470596, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.471361, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.472131, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.473068, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4737349, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.481837, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4828649, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4835358, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4857202, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4861922, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.486644, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.490088, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4935129, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5012, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.501941, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.502363, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5025961, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.502961, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5032582, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.50385, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5059588, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5064478, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5070798, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.508459, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.523073, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.528616, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.52976, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.531104, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5320342, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.533005, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.536449, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.537867, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.539545, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5401359, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5423229, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.554441, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.558579, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.559229, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.561732, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.56239, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.564075, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.565938, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.567849, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.568505, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5689778, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.569709, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.570188, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5709162, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.57139, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.572049, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.572522, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5729191, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.57362, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.585784, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.59618, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.60002, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.603049, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.605203, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.611255, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.612431, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6129012, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.613973, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.614995, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.621283, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6220968, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.622665, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.626231, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.627094, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.627518, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.627975, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.628621, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6366222, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6457422, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.648115, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.648722, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.650075, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.65061, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6509619, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6513228, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6516201, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.652027, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6523268, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6536052, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.654092, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.657616, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.658516, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.659103, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.660212, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.660863, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.661577, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.662461, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6631, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.664021, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.664862, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.665466, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.666805, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.670157, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.671575, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.672309, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.675864, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.678925, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.680963, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.681591, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.682165, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.682366, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6831412, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.683658, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.684277, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6846101, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.685228, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.685508, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6863098, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6870232, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.687619, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6878238, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.688459, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.688828, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.689515, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.689863, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.691304, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6924748, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.693249, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.69366, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.694339, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6946921, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.695292, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.695692, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.696287, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.696754, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.697381, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6978078, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.698524, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6990662, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.69984, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.700127, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.702124, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.702521, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.702935, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.703326, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.703733, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.704247, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7046552, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.705098, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.705503, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.705885, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7063448, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7067552, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7071831, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.707561, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.708241, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7085838, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.709186, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7094588, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7102468, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.710925, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.711309, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.712547, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.71298, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.713571, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.714258, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.714612, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7155101, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.716278, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7171369, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.717506, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.718344, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.71883, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7192378, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.719709, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.720729, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7211258, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.721498, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.721788, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.722215, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7224221, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7228422, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.723277, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.724287, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7246542, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.725062, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.726084, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.732279, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.732851, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7335029, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.734247, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7348611, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7356532, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7361112, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7366579, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7371142, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.737532, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.738038, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7387478, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7393658, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.740736, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.741308, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.741668, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7421598, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.743146, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7441761, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7475011, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7478158, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.748252, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7485409, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.749697, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.750331, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7506151, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.751184, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7516642, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.752241, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.752723, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7532978, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7553658, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.756046, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7566988, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7572742, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.760213, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.761635, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.762066, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.762573, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.762995, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7636638, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.76503, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.767971, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7686348, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.769115, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.769671, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.770122, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.770835, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.771361, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.772129, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.772596, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7730088, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.776206, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.776611, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.777584, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.77807, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.77891, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.77949, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7810528, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.781748, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7839968, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join(\".\"): resolved | string | replace('\"', '\\\"')}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.787274, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join(\".\"): resolved | string | replace('\"', '\\\"')}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.788432, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.790293, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = this | string | replace('\"', '\\\\\"') %}\n def __repr__(self):\n return \"{{ this_relation_name }}\"\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.791504, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.791784, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7929192, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.793498, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.794215, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7948952, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [["my_model"]], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1676322282.478955}}, "metrics": {"metric.test.my_metric": {"name": "my_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.my_metric", "fqn": ["test", "my_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "metrics": [], "created_at": 1676322282.491698}}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "34f7b8e60d9e7933469c48d6c92b0a53918d0ba626a9ce2c30ab2f1532145827"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.224511, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "52b08465e16dcbc364162dfbdb34cf25e04295bc13d63ab0b420f60d15234c76"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763222812618906995_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763222812618906995_test_previous_version_state", "enabled": false}, "created_at": 1676322282.303265, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763222812618906995_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [["my_seed"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "76b8579816eac97721616fd429dcd1a93c311c6358830a65d40ebe5661572610"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.3320582, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "0b5827d08d1e3c97e8fb865bea00031b2e90ecef7884a42429cc48d0f48b8c20"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.359573, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.455549, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "column_name": null, "file_key_name": "models.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "sources": [], "metrics": [], "created_at": 1676322282.482795}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "metrics": [], "created_at": 1676322282.495338}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "c6c08a913b5a382014ef0ba248d97b12fc801beb369fdbd24aff1a3912ee3773"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.462719, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-115/project0", "depends_on": {"macros": []}}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1676322282.498409}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "metric.test.my_metric", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v9/manifest.json b/tests/functional/artifacts/data/state/v9/manifest.json new file mode 100644 index 000000000..febb34712 --- /dev/null +++ b/tests/functional/artifacts/data/state/v9/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v9.json", "dbt_version": "1.5.0b5", "generated_at": "2023-04-10T02:53:50.434615Z", "invocation_id": "7e6390ca-c227-4a45-b9e0-85eeb260e9a8", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.898038, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "version": null, "is_latest_version": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16810952296205305560_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16810952296205305560_test_previous_version_state"}, "created_at": 1681095229.843765, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.8655732, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1681095229.884334, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.889285, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/k6/gtt07v8j2vn51m_z05xk_fjc0000gp/T/pytest-of-michelleark/pytest-80/project5", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.898516, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.900049, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1681095229.938866}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549095, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549314, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549501, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5497909, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549994, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5501041, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5502121, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5503209, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.551399, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.552023, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_columns_spec_ddl() }} ;\n insert into {{ relation }} {{ get_column_names() }}\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_columns_spec_ddl", "macro.dbt_postgres.get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5590951, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.559607, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5599282, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.560247, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5607271, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5611079, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.561283, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.561631, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.56203, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.562905, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5631082, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.563437, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.563714, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.564138, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.564367, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5649762, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.565191, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.565307, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.565693, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5664032, "supported_languages": null}, "macro.dbt_postgres.get_column_names": {"name": "get_column_names", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/columns_spec_ddl.sql", "original_file_path": "macros/utils/columns_spec_ddl.sql", "unique_id": "macro.dbt_postgres.get_column_names", "macro_sql": "{% macro get_column_names() %}\n {# loop through user_provided_columns to get column names #}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns %}\n {% set col = user_provided_columns[i] %}\n {{ col['name'] }} {{ \",\" if not loop.last }}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.566866, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.567093, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5678, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.571049, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5712, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.571703, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.572116, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.573218, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.573418, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.57356, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5737019, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.573847, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5742688, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5745878, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.574906, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.575354, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5756302, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.579268, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.579456, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.579693, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5804448, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5806148, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5807948, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5822341, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.583667, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.587839, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.58813, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5883, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.588392, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5885382, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.588657, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.588872, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5897758, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.589972, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.590228, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5906692, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5969589, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.599081, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5995462, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.599873, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.600277, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.600673, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6024802, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.603078, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.603765, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6040099, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.604749, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.61131, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.613036, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6133099, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.614329, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6146078, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.615274, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6159291, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.616825, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.617065, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6172569, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.617557, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6177459, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618047, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618237, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618508, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6187038, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618857, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.619138, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.624626, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.630311, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.631569, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6327949, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.633666, "supported_languages": null}, "macro.dbt.get_columns_spec_ddl": {"name": "get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_columns_spec_ddl", "macro_sql": "{%- macro get_columns_spec_ddl() -%}\n {{ adapter.dispatch('get_columns_spec_ddl', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.634799, "supported_languages": null}, "macro.dbt.default__get_columns_spec_ddl": {"name": "default__get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_columns_spec_ddl", "macro_sql": "{% macro default__get_columns_spec_ddl() -%}\n {{ return(columns_spec_ddl()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.634922, "supported_languages": null}, "macro.dbt.columns_spec_ddl": {"name": "columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.columns_spec_ddl", "macro_sql": "{% macro columns_spec_ddl() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set constraints = col['constraints'] -%}\n {{ col['name'] }} {{ col['data_type'] }}{% for c in constraints %} {{ adapter.render_raw_column_constraint(c) }}{% endfor %}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6354618, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6356301, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.635766, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(model['columns'])) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set string_sql_columns = stringify_formatted_columns(sql_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n {%- set string_yaml_columns = stringify_formatted_columns(yaml_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(string_yaml_columns, string_sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(string_yaml_columns, string_sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(string_yaml_columns, string_sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns", "macro.dbt.stringify_formatted_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.636966, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append({'name': column.name, 'formatted': formatted_column}) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.637417, "supported_languages": null}, "macro.dbt.stringify_formatted_columns": {"name": "stringify_formatted_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.stringify_formatted_columns", "macro_sql": "{% macro stringify_formatted_columns(formatted_columns) %}\n {% set column_strings = [] %}\n {% for column in formatted_columns %}\n {% do column_strings.append(column['formatted']) %}\n {% endfor %}\n {{ return(column_strings|join(', ')) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6377542, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {{ return(column.column.lower() ~ \" \" ~ column.dtype) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6379352, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.640608, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.641506, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.641694, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.642134, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_columns_spec_ddl() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_columns_spec_ddl", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.642807, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.643002, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select\n {% for column in model['columns'] %}\n {{ column }}{{ \", \" if not loop.last }}\n {% endfor %}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.643258, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.645857, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.64616, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6463842, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6479082, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.648345, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.648515, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6487029, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6491442, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.652572, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.657813, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.65873, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.658967, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659456, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659651, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659791, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659935, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6600509, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6602108, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6603289, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.660815, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6610072, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.66231, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.662733, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ node.version) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6630669, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6635892, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.663862, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.664153, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6645498, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.664808, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6651552, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.66545, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.665698, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6662662, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.667764, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6684191, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.668716, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.670603, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.671814, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6726348, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.672904, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6731641, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.673249, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.673606, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.673785, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.674051, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6741881, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.674554, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.674687, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.675072, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6752498, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.675481, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.675561, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6758258, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6759732, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.676275, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6765099, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6771588, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.677581, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.677927, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.678091, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6783779, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6785228, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6787798, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6789498, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.679196, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6793652, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6796181, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6797252, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.680025, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6801689, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.680419, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6805272, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.681462, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.681621, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6818528, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6820118, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6821811, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.682334, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6826391, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.682859, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683039, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683199, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683383, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683554, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683734, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683891, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.684203, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.684342, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6845891, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.684702, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.685059, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.685353, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.685507, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6860402, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.686302, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.686583, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6868732, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687006, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687397, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687667, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687964, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688101, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688492, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688687, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688858, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.689048, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6895611, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.689718, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.689871, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6899788, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.690153, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.690235, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6904068, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.690679, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.691232, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6913862, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.69156, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.692, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.695603, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6958349, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6960871, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.696411, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6966882, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.697024, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6972108, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.697429, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6976268, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.697804, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.69802, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6983092, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.698685, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6992798, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.699531, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.699724, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.699935, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.700392, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7008178, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7027688, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.702912, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7031, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.703357, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7037492, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7039511, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7040598, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.704402, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.704654, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.704913, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.705131, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.705385, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7062578, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.706471, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.706742, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.707036, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.708344, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7090821, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.709265, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7094889, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7096682, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.709949, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7104452, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.711974, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.712246, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.712435, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7125928, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.712777, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7130291, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7132502, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.713568, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.713761, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.713928, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.716088, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7163389, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.716653, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.71684, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql) %}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.71695, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.717133, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif -%}\n cast(null as {{ col['data_type'] }}) as {{ col['name'] }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.717796, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.718113, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7183008, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7186701, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.718902, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.719517, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7197812, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.720577, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7219672, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.722124, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.722541, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.722951, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.723784, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.724266, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.724343, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.724848, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7250812, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.725373, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.725664, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1681095229.907179}}, "metrics": {"metric.test.my_metric": {"name": "my_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.my_metric", "fqn": ["test", "my_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "metrics": [], "created_at": 1681095229.936167, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.840684, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "version": null, "is_latest_version": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16810952296205305560_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16810952296205305560_test_previous_version_state", "enabled": false}, "created_at": 1681095229.846447, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16810952296205305560_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.863457, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.882241, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.9022238, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1681095229.9083421}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "metrics": [], "created_at": 1681095229.937823, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.905121, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/k6/gtt07v8j2vn51m_z05xk_fjc0000gp/T/pytest-of-michelleark/pytest-80/project5", "depends_on": {"macros": []}}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1681095229.939002}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "metric.test.my_metric", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}, "group_map": {}} diff --git a/tests/functional/artifacts/expected_manifest.py b/tests/functional/artifacts/expected_manifest.py new file mode 100644 index 000000000..7c352bfb6 --- /dev/null +++ b/tests/functional/artifacts/expected_manifest.py @@ -0,0 +1,1939 @@ +import hashlib +import os +from unittest.mock import ANY + +import dbt +from dbt.tests.util import AnyStringWith + + +# This produces an "expected manifest", with a number of the fields +# modified to avoid ephemeral changes. +# ANY +# AnyStringWith +# LineIndifferent +# It also uses some convenience methods to generate the +# various config dictionaries. + + +def get_rendered_model_config(**updates): + result = { + "database": None, + "schema": None, + "alias": None, + "enabled": True, + "group": None, + "materialized": "view", + "pre-hook": [], + "post-hook": [], + "column_types": {}, + "quoting": {}, + "tags": [], + "persist_docs": {}, + "full_refresh": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "meta": {}, + "unique_key": None, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + } + result.update(updates) + return result + + +def get_unrendered_model_config(**updates): + return updates + + +def get_rendered_seed_config(**updates): + result = { + "enabled": True, + "group": None, + "materialized": "seed", + "persist_docs": {}, + "pre-hook": [], + "post-hook": [], + "column_types": {}, + "delimiter": ",", + "quoting": {}, + "tags": [], + "quote_columns": True, + "full_refresh": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "unique_key": None, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + } + result.update(updates) + return result + + +def get_unrendered_seed_config(**updates): + result = {"quote_columns": True} + result.update(updates) + return result + + +def get_rendered_snapshot_config(**updates): + result = { + "database": None, + "schema": None, + "alias": None, + "enabled": True, + "group": None, + "materialized": "snapshot", + "pre-hook": [], + "post-hook": [], + "column_types": {}, + "quoting": {}, + "tags": [], + "persist_docs": {}, + "full_refresh": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "strategy": "check", + "check_cols": "all", + "unique_key": "id", + "target_database": None, + "target_schema": None, + "updated_at": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + } + result.update(updates) + return result + + +def get_unrendered_snapshot_config(**updates): + result = {"check_cols": "all", "strategy": "check", "target_schema": None, "unique_key": "id"} + result.update(updates) + return result + + +def get_rendered_tst_config(**updates): + result = { + "enabled": True, + "group": None, + "materialized": "test", + "tags": [], + "severity": "ERROR", + "store_failures": None, + "store_failures_as": None, + "warn_if": "!= 0", + "error_if": "!= 0", + "fail_calc": "count(*)", + "where": None, + "limit": None, + "database": None, + "schema": "dbt_test__audit", + "alias": None, + "meta": {}, + } + result.update(updates) + return result + + +def get_unrendered_tst_config(**updates): + result = {} + result.update(updates) + return result + + +def quote(value): + quote_char = '"' + return "{0}{1}{0}".format(quote_char, value) + + +def relation_name_format(quote_database: bool, quote_schema: bool, quote_identifier: bool): + return ".".join( + ( + quote("{0}") if quote_database else "{0}", + quote("{1}") if quote_schema else "{1}", + quote("{2}") if quote_identifier else "{2}", + ) + ) + + +def checksum_file(path): + """windows has silly git behavior that adds newlines, and python does + silly things if we just open(..., 'r').encode('utf-8'). + """ + with open(path, "rb") as fp: + # We strip the file contents because we want the checksum to match the stored contents + hashed = hashlib.sha256(fp.read().strip()).hexdigest() + return { + "name": "sha256", + "checksum": hashed, + } + + +def read_file_replace_returns(path): + with open(path, "r") as fp: + return fp.read().replace("\r", "").replace("\n", "") + + +class LineIndifferent: + def __init__(self, expected): + self.expected = expected.replace("\r", "") + + def __eq__(self, other): + got = other.replace("\r", "").replace("\n", "") + return self.expected == got + + def __repr__(self): + return "LineIndifferent({!r})".format(self.expected) + + def __str__(self): + return self.__repr__() + + +def expected_seeded_manifest(project, model_database=None, quote_model=False): + + model_sql_path = os.path.join("models", "model.sql") + second_model_sql_path = os.path.join("models", "second_model.sql") + model_schema_yml_path = os.path.join("models", "schema.yml") + seed_schema_yml_path = os.path.join("seeds", "schema.yml") + seed_path = os.path.join("seeds", "seed.csv") + snapshot_path = os.path.join("snapshots", "snapshot_seed.sql") + + my_schema_name = project.test_schema + alternate_schema = project.test_schema + "_test" + test_audit_schema = my_schema_name + "_dbt_test__audit" + + model_database = project.database + + model_config = get_rendered_model_config(docs={"node_color": None, "show": False}) + second_config = get_rendered_model_config( + schema="test", docs={"node_color": None, "show": False} + ) + + unrendered_model_config = get_unrendered_model_config( + materialized="view", docs={"show": False} + ) + + unrendered_second_config = get_unrendered_model_config( + schema="test", materialized="view", docs={"show": False} + ) + + seed_config = get_rendered_seed_config() + unrendered_seed_config = get_unrendered_seed_config() + + test_config = get_rendered_tst_config() + unrendered_test_config = get_unrendered_tst_config() + + snapshot_config = get_rendered_snapshot_config(target_schema=alternate_schema) + unrendered_snapshot_config = get_unrendered_snapshot_config(target_schema=alternate_schema) + + quote_database = quote_schema = True + relation_name_node_format = relation_name_format(quote_database, quote_schema, quote_model) + relation_name_source_format = relation_name_format( + quote_database, quote_schema, quote_identifier=True + ) + + compiled_model_path = os.path.join("target", "compiled", "test", "models") + + model_raw_code = read_file_replace_returns(model_sql_path).rstrip("\r\n") + + return { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", + "dbt_version": dbt.version.__version__, + "nodes": { + "model.test.model": { + "compiled_path": os.path.join(compiled_model_path, "model.sql"), + "build_path": None, + "created_at": ANY, + "name": "model", + "relation_name": relation_name_node_format.format( + model_database, my_schema_name, "model" + ), + "resource_type": "model", + "path": "model.sql", + "original_file_path": model_sql_path, + "package_name": "test", + "raw_code": LineIndifferent(model_raw_code), + "language": "sql", + "refs": [{"name": "seed", "package": None, "version": None}], + "sources": [], + "depends_on": {"nodes": ["seed.test.seed"], "macros": []}, + "deprecation_date": None, + "unique_id": "model.test.model", + "fqn": ["test", "model"], + "metrics": [], + "tags": [], + "meta": {}, + "config": model_config, + "group": None, + "schema": my_schema_name, + "database": model_database, + "deferred": False, + "alias": "model", + "description": "The test model", + "columns": { + "id": { + "name": "id", + "description": "The user ID number", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "first_name": { + "name": "first_name", + "description": "The user's first name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "email": { + "name": "email", + "description": "The user's email", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ip_address": { + "name": "ip_address", + "description": "The user's IP address", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "updated_at": { + "name": "updated_at", + "description": "The last time this user's email was updated", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "constraints": [], + "patch_path": "test://" + model_schema_yml_path, + "docs": {"node_color": None, "show": False}, + "compiled": True, + "compiled_code": ANY, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(model_sql_path), + "unrendered_config": unrendered_model_config, + "access": "protected", + "version": None, + "latest_version": None, + }, + "model.test.second_model": { + "compiled_path": os.path.join(compiled_model_path, "second_model.sql"), + "build_path": None, + "created_at": ANY, + "name": "second_model", + "relation_name": relation_name_node_format.format( + project.database, alternate_schema, "second_model" + ), + "resource_type": "model", + "path": "second_model.sql", + "original_file_path": second_model_sql_path, + "package_name": "test", + "raw_code": LineIndifferent( + read_file_replace_returns(second_model_sql_path).rstrip("\r\n") + ), + "language": "sql", + "refs": [{"name": "seed", "package": None, "version": None}], + "sources": [], + "depends_on": {"nodes": ["seed.test.seed"], "macros": []}, + "deprecation_date": None, + "unique_id": "model.test.second_model", + "fqn": ["test", "second_model"], + "metrics": [], + "tags": [], + "meta": {}, + "config": second_config, + "group": None, + "schema": alternate_schema, + "database": project.database, + "deferred": False, + "alias": "second_model", + "description": "The second test model", + "columns": { + "id": { + "name": "id", + "description": "The user ID number", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "first_name": { + "name": "first_name", + "description": "The user's first name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "email": { + "name": "email", + "description": "The user's email", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ip_address": { + "name": "ip_address", + "description": "The user's IP address", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "updated_at": { + "name": "updated_at", + "description": "The last time this user's email was updated", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "constraints": [], + "patch_path": "test://" + model_schema_yml_path, + "docs": {"node_color": None, "show": False}, + "compiled": True, + "compiled_code": ANY, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(second_model_sql_path), + "unrendered_config": unrendered_second_config, + "access": "protected", + "version": None, + "latest_version": None, + }, + "seed.test.seed": { + "build_path": None, + "created_at": ANY, + "config": seed_config, + "group": None, + "patch_path": "test://" + seed_schema_yml_path, + "path": "seed.csv", + "name": "seed", + "root_path": project.project_root, + "resource_type": "seed", + "raw_code": "", + "package_name": "test", + "original_file_path": seed_path, + "unique_id": "seed.test.seed", + "fqn": ["test", "seed"], + "tags": [], + "meta": {}, + "depends_on": {"macros": []}, + "schema": my_schema_name, + "database": project.database, + "alias": "seed", + "deferred": False, + "description": "The test seed", + "columns": { + "id": { + "name": "id", + "description": "The user ID number", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "first_name": { + "name": "first_name", + "description": "The user's first name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "email": { + "name": "email", + "description": "The user's email", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ip_address": { + "name": "ip_address", + "description": "The user's IP address", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "updated_at": { + "name": "updated_at", + "description": "The last time this user's email was updated", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "docs": {"node_color": None, "show": True}, + "checksum": checksum_file(seed_path), + "unrendered_config": unrendered_seed_config, + "relation_name": relation_name_node_format.format( + project.database, my_schema_name, "seed" + ), + }, + "test.test.not_null_model_id.d01cc630e6": { + "alias": "not_null_model_id", + "attached_node": "model.test.model", + "compiled_path": os.path.join( + compiled_model_path, "schema.yml", "not_null_model_id.sql" + ), + "build_path": None, + "created_at": ANY, + "column_name": "id", + "columns": {}, + "config": test_config, + "sources": [], + "group": None, + "depends_on": { + "macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], + "nodes": ["model.test.model"], + }, + "deferred": False, + "description": "", + "file_key_name": "models.model", + "fqn": ["test", "not_null_model_id"], + "metrics": [], + "name": "not_null_model_id", + "original_file_path": model_schema_yml_path, + "package_name": "test", + "patch_path": None, + "path": "not_null_model_id.sql", + "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", + "language": "sql", + "refs": [{"name": "model", "package": None, "version": None}], + "relation_name": None, + "resource_type": "test", + "schema": test_audit_schema, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "test.test.not_null_model_id.d01cc630e6", + "docs": {"node_color": None, "show": True}, + "compiled": True, + "compiled_code": AnyStringWith("where id is null"), + "extra_ctes_injected": True, + "extra_ctes": [], + "test_metadata": { + "namespace": None, + "name": "not_null", + "kwargs": { + "column_name": "id", + "model": "{{ get_where_subquery(ref('model')) }}", + }, + }, + "checksum": {"name": "none", "checksum": ""}, + "unrendered_config": unrendered_test_config, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + }, + "snapshot.test.snapshot_seed": { + "alias": "snapshot_seed", + "compiled_path": None, + "build_path": None, + "created_at": ANY, + "checksum": checksum_file(snapshot_path), + "columns": {}, + "compiled": True, + "compiled_code": ANY, + "config": snapshot_config, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "database": project.database, + "group": None, + "deferred": False, + "depends_on": { + "macros": [], + "nodes": ["seed.test.seed"], + }, + "description": "", + "docs": {"node_color": None, "show": True}, + "extra_ctes": [], + "extra_ctes_injected": True, + "fqn": ["test", "snapshot_seed", "snapshot_seed"], + "metrics": [], + "meta": {}, + "name": "snapshot_seed", + "original_file_path": snapshot_path, + "package_name": "test", + "patch_path": None, + "path": "snapshot_seed.sql", + "raw_code": LineIndifferent( + read_file_replace_returns(snapshot_path) + .replace("{% snapshot snapshot_seed %}", "") + .replace("{% endsnapshot %}", "") + ), + "language": "sql", + "refs": [{"name": "seed", "package": None, "version": None}], + "relation_name": relation_name_node_format.format( + project.database, alternate_schema, "snapshot_seed" + ), + "resource_type": "snapshot", + "schema": alternate_schema, + "sources": [], + "tags": [], + "unique_id": "snapshot.test.snapshot_seed", + "unrendered_config": unrendered_snapshot_config, + }, + "test.test.test_nothing_model_.5d38568946": { + "alias": "test_nothing_model_", + "attached_node": "model.test.model", + "compiled_path": os.path.join( + compiled_model_path, "schema.yml", "test_nothing_model_.sql" + ), + "build_path": None, + "created_at": ANY, + "column_name": None, + "columns": {}, + "config": test_config, + "group": None, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": { + "macros": ["macro.test.test_nothing", "macro.dbt.get_where_subquery"], + "nodes": ["model.test.model"], + }, + "deferred": False, + "description": "", + "file_key_name": "models.model", + "fqn": ["test", "test_nothing_model_"], + "metrics": [], + "name": "test_nothing_model_", + "original_file_path": model_schema_yml_path, + "package_name": "test", + "patch_path": None, + "path": "test_nothing_model_.sql", + "raw_code": "{{ test.test_nothing(**_dbt_generic_test_kwargs) }}", + "language": "sql", + "refs": [{"name": "model", "package": None, "version": None}], + "relation_name": None, + "resource_type": "test", + "schema": test_audit_schema, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "test.test.test_nothing_model_.5d38568946", + "docs": {"node_color": None, "show": True}, + "compiled": True, + "compiled_code": AnyStringWith("select 0"), + "extra_ctes_injected": True, + "extra_ctes": [], + "test_metadata": { + "namespace": "test", + "name": "nothing", + "kwargs": { + "model": "{{ get_where_subquery(ref('model')) }}", + }, + }, + "checksum": {"name": "none", "checksum": ""}, + "unrendered_config": unrendered_test_config, + }, + "test.test.unique_model_id.67b76558ff": { + "alias": "unique_model_id", + "attached_node": "model.test.model", + "compiled_path": os.path.join( + compiled_model_path, "schema.yml", "unique_model_id.sql" + ), + "build_path": None, + "created_at": ANY, + "column_name": "id", + "columns": {}, + "config": test_config, + "group": None, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": { + "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], + "nodes": ["model.test.model"], + }, + "deferred": False, + "description": "", + "file_key_name": "models.model", + "fqn": ["test", "unique_model_id"], + "metrics": [], + "name": "unique_model_id", + "original_file_path": model_schema_yml_path, + "package_name": "test", + "patch_path": None, + "path": "unique_model_id.sql", + "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", + "language": "sql", + "refs": [{"name": "model", "package": None, "version": None}], + "relation_name": None, + "resource_type": "test", + "schema": test_audit_schema, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "test.test.unique_model_id.67b76558ff", + "docs": {"node_color": None, "show": True}, + "compiled": True, + "compiled_code": AnyStringWith("count(*)"), + "extra_ctes_injected": True, + "extra_ctes": [], + "test_metadata": { + "namespace": None, + "name": "unique", + "kwargs": { + "column_name": "id", + "model": "{{ get_where_subquery(ref('model')) }}", + }, + }, + "checksum": {"name": "none", "checksum": ""}, + "unrendered_config": unrendered_test_config, + }, + }, + "sources": { + "source.test.my_source.my_table": { + "created_at": ANY, + "columns": { + "id": { + "description": "An ID field", + "name": "id", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + } + }, + "config": { + "enabled": True, + }, + "quoting": { + "database": None, + "schema": None, + "identifier": True, + "column": None, + }, + "database": project.database, + "description": "My table", + "external": None, + "freshness": { + "error_after": {"count": None, "period": None}, + "warn_after": {"count": None, "period": None}, + "filter": None, + }, + "identifier": "seed", + "loaded_at_field": None, + "loader": "a_loader", + "meta": {}, + "name": "my_table", + "original_file_path": os.path.join("models", "schema.yml"), + "package_name": "test", + "path": os.path.join("models", "schema.yml"), + "patch_path": None, + "relation_name": relation_name_source_format.format( + project.database, my_schema_name, "seed" + ), + "resource_type": "source", + "schema": my_schema_name, + "source_description": "My source", + "source_name": "my_source", + "source_meta": {}, + "tags": [], + "unique_id": "source.test.my_source.my_table", + "fqn": ["test", "my_source", "my_table"], + "unrendered_config": {}, + }, + }, + "exposures": { + "exposure.test.notebook_exposure": { + "created_at": ANY, + "depends_on": { + "macros": [], + "nodes": ["model.test.model", "model.test.second_model"], + }, + "description": "A description of the complex exposure\n", + "label": None, + "config": { + "enabled": True, + }, + "fqn": ["test", "notebook_exposure"], + "maturity": "medium", + "meta": {"tool": "my_tool", "languages": ["python"]}, + "metrics": [], + "tags": ["my_department"], + "name": "notebook_exposure", + "original_file_path": os.path.join("models", "schema.yml"), + "owner": {"email": "something@example.com", "name": "Some name"}, + "package_name": "test", + "path": "schema.yml", + "refs": [ + {"name": "model", "package": None, "version": None}, + {"name": "second_model", "package": None, "version": None}, + ], + "resource_type": "exposure", + "sources": [], + "type": "notebook", + "unique_id": "exposure.test.notebook_exposure", + "url": "http://example.com/notebook/1", + "unrendered_config": {}, + }, + "exposure.test.simple_exposure": { + "created_at": ANY, + "depends_on": { + "macros": [], + "nodes": ["source.test.my_source.my_table", "model.test.model"], + }, + "description": "", + "label": None, + "config": { + "enabled": True, + }, + "fqn": ["test", "simple_exposure"], + "metrics": [], + "name": "simple_exposure", + "original_file_path": os.path.join("models", "schema.yml"), + "owner": { + "email": "something@example.com", + "name": None, + }, + "package_name": "test", + "path": "schema.yml", + "refs": [{"name": "model", "package": None, "version": None}], + "resource_type": "exposure", + "sources": [["my_source", "my_table"]], + "type": "dashboard", + "unique_id": "exposure.test.simple_exposure", + "url": None, + "maturity": None, + "meta": {}, + "tags": [], + "unrendered_config": {}, + }, + }, + "metrics": {}, + "groups": {}, + "selectors": {}, + "parent_map": { + "model.test.model": ["seed.test.seed"], + "model.test.second_model": ["seed.test.seed"], + "exposure.test.notebook_exposure": ["model.test.model", "model.test.second_model"], + "exposure.test.simple_exposure": [ + "model.test.model", + "source.test.my_source.my_table", + ], + "seed.test.seed": [], + "snapshot.test.snapshot_seed": ["seed.test.seed"], + "source.test.my_source.my_table": [], + "test.test.not_null_model_id.d01cc630e6": ["model.test.model"], + "test.test.test_nothing_model_.5d38568946": ["model.test.model"], + "test.test.unique_model_id.67b76558ff": ["model.test.model"], + }, + "child_map": { + "model.test.model": [ + "exposure.test.notebook_exposure", + "exposure.test.simple_exposure", + "test.test.not_null_model_id.d01cc630e6", + "test.test.test_nothing_model_.5d38568946", + "test.test.unique_model_id.67b76558ff", + ], + "model.test.second_model": ["exposure.test.notebook_exposure"], + "exposure.test.notebook_exposure": [], + "exposure.test.simple_exposure": [], + "seed.test.seed": [ + "model.test.model", + "model.test.second_model", + "snapshot.test.snapshot_seed", + ], + "snapshot.test.snapshot_seed": [], + "source.test.my_source.my_table": ["exposure.test.simple_exposure"], + "test.test.not_null_model_id.d01cc630e6": [], + "test.test.test_nothing_model_.5d38568946": [], + "test.test.unique_model_id.67b76558ff": [], + }, + "group_map": {}, + "docs": { + "doc.dbt.__overview__": ANY, + "doc.test.macro_info": ANY, + "doc.test.macro_arg_info": ANY, + }, + "disabled": {}, + "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, + } + + +def expected_references_manifest(project): + model_database = project.database + my_schema_name = project.test_schema + docs_path = os.path.join("models", "docs.md") + ephemeral_copy_path = os.path.join("models", "ephemeral_copy.sql") + ephemeral_summary_path = os.path.join("models", "ephemeral_summary.sql") + view_summary_path = os.path.join("models", "view_summary.sql") + seed_path = os.path.join("seeds", "seed.csv") + snapshot_path = os.path.join("snapshots", "snapshot_seed.sql") + compiled_model_path = os.path.join("target", "compiled", "test", "models") + schema_yml_path = os.path.join("models", "schema.yml") + + ephemeral_copy_sql = read_file_replace_returns(ephemeral_copy_path).rstrip("\r\n") + ephemeral_summary_sql = read_file_replace_returns(ephemeral_summary_path).rstrip("\r\n") + view_summary_sql = read_file_replace_returns(view_summary_path).rstrip("\r\n") + alternate_schema = project.test_schema + "_test" + + return { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", + "dbt_version": dbt.version.__version__, + "nodes": { + "model.test.ephemeral_copy": { + "alias": "ephemeral_copy", + "compiled_path": os.path.join(compiled_model_path, "ephemeral_copy.sql"), + "build_path": None, + "created_at": ANY, + "columns": {}, + "config": get_rendered_model_config(materialized="ephemeral"), + "sources": [["my_source", "my_table"]], + "depends_on": { + "macros": [], + "nodes": ["source.test.my_source.my_table"], + }, + "deprecation_date": None, + "deferred": False, + "description": "", + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "ephemeral_copy"], + "group": None, + "metrics": [], + "name": "ephemeral_copy", + "original_file_path": ephemeral_copy_path, + "package_name": "test", + "patch_path": None, + "path": "ephemeral_copy.sql", + "raw_code": LineIndifferent(ephemeral_copy_sql), + "language": "sql", + "refs": [], + "relation_name": None, + "resource_type": "model", + "schema": my_schema_name, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "model.test.ephemeral_copy", + "compiled": True, + "compiled_code": ANY, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(ephemeral_copy_path), + "unrendered_config": get_unrendered_model_config(materialized="ephemeral"), + "access": "protected", + "version": None, + "latest_version": None, + "constraints": [], + }, + "model.test.ephemeral_summary": { + "alias": "ephemeral_summary", + "compiled_path": os.path.join(compiled_model_path, "ephemeral_summary.sql"), + "build_path": None, + "created_at": ANY, + "columns": { + "first_name": { + "description": "The first name being summarized", + "name": "first_name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ct": { + "description": "The number of instances of the first name", + "name": "ct", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "config": get_rendered_model_config(materialized="table", group="test_group"), + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": { + "macros": [], + "nodes": ["model.test.ephemeral_copy"], + }, + "deprecation_date": None, + "deferred": False, + "description": "A summmary table of the ephemeral copy of the seed data", + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "ephemeral_summary"], + "group": "test_group", + "metrics": [], + "name": "ephemeral_summary", + "original_file_path": ephemeral_summary_path, + "package_name": "test", + "patch_path": "test://" + os.path.join("models", "schema.yml"), + "path": "ephemeral_summary.sql", + "raw_code": LineIndifferent(ephemeral_summary_sql), + "language": "sql", + "refs": [{"name": "ephemeral_copy", "package": None, "version": None}], + "relation_name": '"{0}"."{1}".ephemeral_summary'.format( + model_database, my_schema_name + ), + "resource_type": "model", + "schema": my_schema_name, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "model.test.ephemeral_summary", + "compiled": True, + "compiled_code": ANY, + "extra_ctes_injected": True, + "extra_ctes": [ANY], + "checksum": checksum_file(ephemeral_summary_path), + "unrendered_config": get_unrendered_model_config( + materialized="table", group="test_group" + ), + "access": "protected", + "version": None, + "latest_version": None, + "constraints": [], + }, + "model.test.view_summary": { + "alias": "view_summary", + "compiled_path": os.path.join(compiled_model_path, "view_summary.sql"), + "build_path": None, + "created_at": ANY, + "columns": { + "first_name": { + "description": "The first name being summarized", + "name": "first_name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ct": { + "description": "The number of instances of the first name", + "name": "ct", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "config": get_rendered_model_config(), + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "database": project.database, + "depends_on": { + "macros": [], + "nodes": ["model.test.ephemeral_summary"], + }, + "deprecation_date": None, + "deferred": False, + "description": "A view of the summary of the ephemeral copy of the seed data", + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "view_summary"], + "group": None, + "metrics": [], + "name": "view_summary", + "original_file_path": view_summary_path, + "package_name": "test", + "patch_path": "test://" + schema_yml_path, + "path": "view_summary.sql", + "raw_code": LineIndifferent(view_summary_sql), + "language": "sql", + "refs": [{"name": "ephemeral_summary", "package": None, "version": None}], + "relation_name": '"{0}"."{1}".view_summary'.format(model_database, my_schema_name), + "resource_type": "model", + "schema": my_schema_name, + "sources": [], + "tags": [], + "meta": {}, + "unique_id": "model.test.view_summary", + "compiled": True, + "compiled_code": ANY, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(view_summary_path), + "unrendered_config": get_unrendered_model_config(materialized="view"), + "access": "protected", + "version": None, + "latest_version": None, + "constraints": [], + }, + "seed.test.seed": { + "alias": "seed", + "build_path": None, + "created_at": ANY, + "columns": { + "id": { + "name": "id", + "description": "The user ID number", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "first_name": { + "name": "first_name", + "description": "The user's first name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "email": { + "name": "email", + "description": "The user's email", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ip_address": { + "name": "ip_address", + "description": "The user's IP address", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "updated_at": { + "name": "updated_at", + "description": "The last time this user's email was updated", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "config": get_rendered_seed_config(), + "deferred": False, + "depends_on": {"macros": []}, + "description": "The test seed", + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "seed"], + "group": None, + "name": "seed", + "original_file_path": seed_path, + "package_name": "test", + "patch_path": "test://" + os.path.join("seeds", "schema.yml"), + "path": "seed.csv", + "raw_code": "", + "resource_type": "seed", + "root_path": project.project_root, + "schema": my_schema_name, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "seed.test.seed", + "checksum": checksum_file(seed_path), + "unrendered_config": get_unrendered_seed_config(), + "relation_name": '"{0}"."{1}".seed'.format(project.database, my_schema_name), + }, + "snapshot.test.snapshot_seed": { + "alias": "snapshot_seed", + "compiled_path": None, + "build_path": None, + "created_at": ANY, + "checksum": checksum_file(snapshot_path), + "columns": {}, + "compiled": True, + "compiled_code": ANY, + "config": get_rendered_snapshot_config(target_schema=alternate_schema), + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "database": model_database, + "deferred": False, + "depends_on": {"macros": [], "nodes": ["seed.test.seed"]}, + "description": "", + "docs": {"node_color": None, "show": True}, + "extra_ctes": [], + "extra_ctes_injected": True, + "fqn": ["test", "snapshot_seed", "snapshot_seed"], + "group": None, + "metrics": [], + "meta": {}, + "name": "snapshot_seed", + "original_file_path": snapshot_path, + "package_name": "test", + "patch_path": None, + "path": "snapshot_seed.sql", + "raw_code": ANY, + "language": "sql", + "refs": [{"name": "seed", "package": None, "version": None}], + "relation_name": '"{0}"."{1}".snapshot_seed'.format( + model_database, alternate_schema + ), + "resource_type": "snapshot", + "schema": alternate_schema, + "sources": [], + "tags": [], + "unique_id": "snapshot.test.snapshot_seed", + "unrendered_config": get_unrendered_snapshot_config( + target_schema=alternate_schema + ), + }, + }, + "sources": { + "source.test.my_source.my_table": { + "columns": { + "id": { + "description": "An ID field", + "name": "id", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + } + }, + "config": { + "enabled": True, + }, + "quoting": { + "database": False, + "schema": None, + "identifier": True, + "column": None, + }, + "created_at": ANY, + "database": project.database, + "description": "My table", + "external": None, + "freshness": { + "error_after": {"count": None, "period": None}, + "warn_after": {"count": None, "period": None}, + "filter": None, + }, + "identifier": "seed", + "loaded_at_field": None, + "loader": "a_loader", + "meta": {}, + "name": "my_table", + "original_file_path": os.path.join("models", "schema.yml"), + "package_name": "test", + "path": os.path.join("models", "schema.yml"), + "patch_path": None, + "relation_name": '{0}."{1}"."seed"'.format(project.database, my_schema_name), + "resource_type": "source", + "schema": my_schema_name, + "source_description": "My source", + "source_name": "my_source", + "source_meta": {}, + "tags": [], + "unique_id": "source.test.my_source.my_table", + "fqn": ["test", "my_source", "my_table"], + "unrendered_config": {}, + }, + }, + "exposures": { + "exposure.test.notebook_exposure": { + "created_at": ANY, + "depends_on": { + "macros": [], + "nodes": ["model.test.view_summary"], + }, + "description": "A description of the complex exposure", + "label": None, + "config": { + "enabled": True, + }, + "fqn": ["test", "notebook_exposure"], + "maturity": "medium", + "meta": {"tool": "my_tool", "languages": ["python"]}, + "metrics": [], + "tags": ["my_department"], + "name": "notebook_exposure", + "original_file_path": os.path.join("models", "schema.yml"), + "owner": {"email": "something@example.com", "name": "Some name"}, + "package_name": "test", + "path": "schema.yml", + "refs": [{"name": "view_summary", "package": None, "version": None}], + "resource_type": "exposure", + "sources": [], + "type": "notebook", + "unique_id": "exposure.test.notebook_exposure", + "url": "http://example.com/notebook/1", + "unrendered_config": {}, + }, + }, + "metrics": {}, + "groups": { + "group.test.test_group": { + "name": "test_group", + "resource_type": "group", + "original_file_path": os.path.join("models", "schema.yml"), + "owner": {"email": "test_group@test.com", "name": None}, + "package_name": "test", + "path": "schema.yml", + "unique_id": "group.test.test_group", + } + }, + "selectors": {}, + "docs": { + "doc.dbt.__overview__": ANY, + "doc.test.column_info": { + "block_contents": "An ID field", + "resource_type": "doc", + "name": "column_info", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.column_info", + }, + "doc.test.ephemeral_summary": { + "block_contents": ("A summmary table of the ephemeral copy of the seed data"), + "resource_type": "doc", + "name": "ephemeral_summary", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.ephemeral_summary", + }, + "doc.test.source_info": { + "block_contents": "My source", + "resource_type": "doc", + "name": "source_info", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.source_info", + }, + "doc.test.summary_count": { + "block_contents": "The number of instances of the first name", + "resource_type": "doc", + "name": "summary_count", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.summary_count", + }, + "doc.test.summary_first_name": { + "block_contents": "The first name being summarized", + "resource_type": "doc", + "name": "summary_first_name", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.summary_first_name", + }, + "doc.test.table_info": { + "block_contents": "My table", + "resource_type": "doc", + "name": "table_info", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.table_info", + }, + "doc.test.view_summary": { + "block_contents": ("A view of the summary of the ephemeral copy of the seed data"), + "resource_type": "doc", + "name": "view_summary", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.view_summary", + }, + "doc.test.macro_info": { + "block_contents": "My custom test that I wrote that does nothing", + "resource_type": "doc", + "name": "macro_info", + "original_file_path": os.path.join("macros", "macro.md"), + "package_name": "test", + "path": "macro.md", + "unique_id": "doc.test.macro_info", + }, + "doc.test.notebook_info": { + "block_contents": "A description of the complex exposure", + "resource_type": "doc", + "name": "notebook_info", + "original_file_path": docs_path, + "package_name": "test", + "path": "docs.md", + "unique_id": "doc.test.notebook_info", + }, + "doc.test.macro_arg_info": { + "block_contents": "The model for my custom test", + "resource_type": "doc", + "name": "macro_arg_info", + "original_file_path": os.path.join("macros", "macro.md"), + "package_name": "test", + "path": "macro.md", + "unique_id": "doc.test.macro_arg_info", + }, + }, + "child_map": { + "model.test.ephemeral_copy": ["model.test.ephemeral_summary"], + "exposure.test.notebook_exposure": [], + "model.test.ephemeral_summary": ["model.test.view_summary"], + "model.test.view_summary": ["exposure.test.notebook_exposure"], + "seed.test.seed": ["snapshot.test.snapshot_seed"], + "snapshot.test.snapshot_seed": [], + "source.test.my_source.my_table": ["model.test.ephemeral_copy"], + }, + "parent_map": { + "model.test.ephemeral_copy": ["source.test.my_source.my_table"], + "model.test.ephemeral_summary": ["model.test.ephemeral_copy"], + "model.test.view_summary": ["model.test.ephemeral_summary"], + "exposure.test.notebook_exposure": ["model.test.view_summary"], + "seed.test.seed": [], + "snapshot.test.snapshot_seed": ["seed.test.seed"], + "source.test.my_source.my_table": [], + }, + "group_map": {"test_group": ["model.test.ephemeral_summary"]}, + "disabled": {}, + "macros": { + "macro.test.test_nothing": { + "name": "test_nothing", + "depends_on": {"macros": []}, + "created_at": ANY, + "description": "My custom test that I wrote that does nothing", + "docs": {"node_color": None, "show": True}, + "macro_sql": AnyStringWith("test nothing"), + "original_file_path": os.path.join("macros", "dummy_test.sql"), + "path": os.path.join("macros", "dummy_test.sql"), + "package_name": "test", + "meta": { + "some_key": 100, + }, + "patch_path": "test://" + os.path.join("macros", "schema.yml"), + "resource_type": "macro", + "unique_id": "macro.test.test_nothing", + "supported_languages": None, + "arguments": [ + { + "name": "model", + "type": "Relation", + "description": "The model for my custom test", + }, + ], + } + }, + "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, + } + + +def expected_versions_manifest(project): + model_database = project.database + my_schema_name = project.test_schema + + versioned_model_v1_path = os.path.join("models", "arbitrary_file_name.sql") + versioned_model_v2_path = os.path.join("models", "versioned_model_v2.sql") + ref_versioned_model_path = os.path.join("models", "ref_versioned_model.sql") + compiled_model_path = os.path.join("target", "compiled", "test", "models") + schema_yml_path = os.path.join("models", "schema.yml") + + versioned_model_v1_sql = read_file_replace_returns(versioned_model_v1_path).rstrip("\r\n") + versioned_model_v2_sql = read_file_replace_returns(versioned_model_v2_path).rstrip("\r\n") + ref_versioned_model_sql = read_file_replace_returns(ref_versioned_model_path).rstrip("\r\n") + + test_config = get_rendered_tst_config() + unrendered_test_config = get_unrendered_tst_config() + test_audit_schema = my_schema_name + "_dbt_test__audit" + model_schema_yml_path = os.path.join("models", "schema.yml") + + return { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", + "dbt_version": dbt.version.__version__, + "nodes": { + "model.test.versioned_model.v1": { + "alias": "versioned_model_v1", + "compiled_path": os.path.join(compiled_model_path, "arbitrary_file_name.sql"), + "build_path": None, + "created_at": ANY, + "columns": { + "first_name": { + "description": "The first name being summarized", + "name": "first_name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "ct": { + "description": "The number of instances of the first name", + "name": "ct", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "config": get_rendered_model_config( + materialized="table", + group="test_group", + meta={"size": "large", "color": "blue"}, + ), + "constraints": [], + "sources": [], + "depends_on": {"macros": [], "nodes": []}, + "deferred": False, + "description": "A versioned model", + "deprecation_date": ANY, + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "versioned_model", "v1"], + "group": "test_group", + "metrics": [], + "name": "versioned_model", + "original_file_path": versioned_model_v1_path, + "package_name": "test", + "patch_path": "test://" + os.path.join("models", "schema.yml"), + "path": "arbitrary_file_name.sql", + "raw_code": LineIndifferent(versioned_model_v1_sql), + "language": "sql", + "refs": [], + "relation_name": '"{0}"."{1}".versioned_model_v1'.format( + model_database, my_schema_name + ), + "resource_type": "model", + "schema": my_schema_name, + "database": project.database, + "tags": [], + "meta": {"size": "large", "color": "blue"}, + "unique_id": "model.test.versioned_model.v1", + "compiled": True, + "compiled_code": ANY, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(versioned_model_v1_path), + "unrendered_config": get_unrendered_model_config( + materialized="table", + group="test_group", + meta={"size": "large", "color": "blue"}, + ), + "access": "protected", + "version": 1, + "latest_version": 2, + }, + "model.test.versioned_model.v2": { + "alias": "versioned_model_v2", + "compiled_path": os.path.join(compiled_model_path, "versioned_model_v2.sql"), + "build_path": None, + "created_at": ANY, + "columns": { + "first_name": { + "description": "The first name being summarized", + "name": "first_name", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + "extra": { + "description": "", + "name": "extra", + "data_type": None, + "meta": {}, + "quote": None, + "tags": [], + "constraints": [], + }, + }, + "config": get_rendered_model_config( + materialized="view", group="test_group", meta={"size": "large", "color": "red"} + ), + "constraints": [], + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": {"macros": [], "nodes": []}, + "deferred": False, + "description": "A versioned model", + "deprecation_date": None, + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "versioned_model", "v2"], + "group": "test_group", + "metrics": [], + "name": "versioned_model", + "original_file_path": versioned_model_v2_path, + "package_name": "test", + "patch_path": "test://" + os.path.join("models", "schema.yml"), + "path": "versioned_model_v2.sql", + "raw_code": LineIndifferent(versioned_model_v2_sql), + "language": "sql", + "refs": [], + "relation_name": '"{0}"."{1}".versioned_model_v2'.format( + model_database, my_schema_name + ), + "resource_type": "model", + "schema": my_schema_name, + "database": project.database, + "tags": [], + "meta": {"size": "large", "color": "red"}, + "unique_id": "model.test.versioned_model.v2", + "compiled": True, + "compiled_code": ANY, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(versioned_model_v2_path), + "unrendered_config": get_unrendered_model_config( + materialized="view", group="test_group", meta={"size": "large", "color": "red"} + ), + "access": "protected", + "version": 2, + "latest_version": 2, + }, + "model.test.ref_versioned_model": { + "alias": "ref_versioned_model", + "compiled_path": os.path.join(compiled_model_path, "ref_versioned_model.sql"), + "build_path": None, + "created_at": ANY, + "columns": {}, + "config": get_rendered_model_config(), + "constraints": [], + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "database": project.database, + "depends_on": { + "macros": [], + "nodes": [ + "model.test.versioned_model.v2", + "model.test.versioned_model.v1", + ], + }, + "deprecation_date": None, + "deferred": False, + "description": "", + "docs": {"node_color": None, "show": True}, + "fqn": ["test", "ref_versioned_model"], + "group": None, + "metrics": [], + "name": "ref_versioned_model", + "original_file_path": ref_versioned_model_path, + "package_name": "test", + "patch_path": "test://" + schema_yml_path, + "path": "ref_versioned_model.sql", + "raw_code": LineIndifferent(ref_versioned_model_sql), + "language": "sql", + "refs": [ + {"name": "versioned_model", "package": None, "version": 2}, + {"name": "versioned_model", "package": None, "version": "2"}, + {"name": "versioned_model", "package": None, "version": 2}, + {"name": "versioned_model", "package": None, "version": None}, + {"name": "versioned_model", "package": None, "version": 1}, + ], + "relation_name": '"{0}"."{1}".ref_versioned_model'.format( + model_database, my_schema_name + ), + "resource_type": "model", + "schema": my_schema_name, + "sources": [], + "tags": [], + "meta": {}, + "unique_id": "model.test.ref_versioned_model", + "compiled": True, + "compiled_code": ANY, + "extra_ctes_injected": True, + "extra_ctes": [], + "checksum": checksum_file(ref_versioned_model_path), + "unrendered_config": get_unrendered_model_config(), + "access": "protected", + "version": None, + "latest_version": None, + }, + "test.test.unique_versioned_model_v1_first_name.6138195dec": { + "alias": "unique_versioned_model_v1_first_name", + "attached_node": "model.test.versioned_model.v1", + "compiled_path": os.path.join( + compiled_model_path, "schema.yml", "unique_versioned_model_v1_first_name.sql" + ), + "build_path": None, + "created_at": ANY, + "column_name": "first_name", + "columns": {}, + "config": test_config, + "group": "test_group", + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": { + "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], + "nodes": ["model.test.versioned_model.v1"], + }, + "deferred": False, + "description": "", + "file_key_name": "models.versioned_model", + "fqn": ["test", "unique_versioned_model_v1_first_name"], + "metrics": [], + "name": "unique_versioned_model_v1_first_name", + "original_file_path": model_schema_yml_path, + "package_name": "test", + "patch_path": None, + "path": "unique_versioned_model_v1_first_name.sql", + "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", + "language": "sql", + "refs": [{"name": "versioned_model", "package": None, "version": 1}], + "relation_name": None, + "resource_type": "test", + "schema": test_audit_schema, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "test.test.unique_versioned_model_v1_first_name.6138195dec", + "docs": {"node_color": None, "show": True}, + "compiled": True, + "compiled_code": AnyStringWith("count(*)"), + "extra_ctes_injected": True, + "extra_ctes": [], + "test_metadata": { + "namespace": None, + "name": "unique", + "kwargs": { + "column_name": "first_name", + "model": "{{ get_where_subquery(ref('versioned_model', version='1')) }}", + }, + }, + "checksum": {"name": "none", "checksum": ""}, + "unrendered_config": unrendered_test_config, + }, + "test.test.unique_versioned_model_v1_count.0b4c0b688a": { + "alias": "unique_versioned_model_v1_count", + "attached_node": "model.test.versioned_model.v1", + "compiled_path": os.path.join( + compiled_model_path, "schema.yml", "unique_versioned_model_v1_count.sql" + ), + "build_path": None, + "created_at": ANY, + "column_name": None, + "columns": {}, + "config": test_config, + "group": "test_group", + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": { + "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], + "nodes": ["model.test.versioned_model.v1"], + }, + "deferred": False, + "description": "", + "file_key_name": "models.versioned_model", + "fqn": ["test", "unique_versioned_model_v1_count"], + "metrics": [], + "name": "unique_versioned_model_v1_count", + "original_file_path": model_schema_yml_path, + "package_name": "test", + "patch_path": None, + "path": "unique_versioned_model_v1_count.sql", + "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", + "language": "sql", + "refs": [{"name": "versioned_model", "package": None, "version": 1}], + "relation_name": None, + "resource_type": "test", + "schema": test_audit_schema, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "test.test.unique_versioned_model_v1_count.0b4c0b688a", + "docs": {"node_color": None, "show": True}, + "compiled": True, + "compiled_code": AnyStringWith("count(*)"), + "extra_ctes_injected": True, + "extra_ctes": [], + "test_metadata": { + "namespace": None, + "name": "unique", + "kwargs": { + "column_name": "count", + "model": "{{ get_where_subquery(ref('versioned_model', version='1')) }}", + }, + }, + "checksum": {"name": "none", "checksum": ""}, + "unrendered_config": unrendered_test_config, + }, + "test.test.unique_versioned_model_v2_first_name.998430d28e": { + "alias": "unique_versioned_model_v2_first_name", + "attached_node": "model.test.versioned_model.v2", + "compiled_path": os.path.join( + compiled_model_path, "schema.yml", "unique_versioned_model_v2_first_name.sql" + ), + "build_path": None, + "created_at": ANY, + "column_name": "first_name", + "columns": {}, + "config": test_config, + "group": "test_group", + "contract": {"checksum": None, "enforced": False, "alias_types": True}, + "sources": [], + "depends_on": { + "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], + "nodes": ["model.test.versioned_model.v2"], + }, + "deferred": False, + "description": "", + "file_key_name": "models.versioned_model", + "fqn": ["test", "unique_versioned_model_v2_first_name"], + "metrics": [], + "name": "unique_versioned_model_v2_first_name", + "original_file_path": model_schema_yml_path, + "package_name": "test", + "patch_path": None, + "path": "unique_versioned_model_v2_first_name.sql", + "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", + "language": "sql", + "refs": [{"name": "versioned_model", "package": None, "version": 2}], + "relation_name": None, + "resource_type": "test", + "schema": test_audit_schema, + "database": project.database, + "tags": [], + "meta": {}, + "unique_id": "test.test.unique_versioned_model_v2_first_name.998430d28e", + "docs": {"node_color": None, "show": True}, + "compiled": True, + "compiled_code": AnyStringWith("count(*)"), + "extra_ctes_injected": True, + "extra_ctes": [], + "test_metadata": { + "namespace": None, + "name": "unique", + "kwargs": { + "column_name": "first_name", + "model": "{{ get_where_subquery(ref('versioned_model', version='2')) }}", + }, + }, + "checksum": {"name": "none", "checksum": ""}, + "unrendered_config": unrendered_test_config, + }, + }, + "exposures": { + "exposure.test.notebook_exposure": { + "created_at": ANY, + "depends_on": { + "macros": [], + "nodes": ["model.test.versioned_model.v2"], + }, + "description": "notebook_info", + "label": None, + "config": { + "enabled": True, + }, + "fqn": ["test", "notebook_exposure"], + "maturity": None, + "meta": {}, + "metrics": [], + "tags": [], + "name": "notebook_exposure", + "original_file_path": os.path.join("models", "schema.yml"), + "owner": {"email": "something@example.com", "name": "Some name"}, + "package_name": "test", + "path": "schema.yml", + "refs": [{"name": "versioned_model", "package": None, "version": 2}], + "resource_type": "exposure", + "sources": [], + "type": "notebook", + "unique_id": "exposure.test.notebook_exposure", + "url": None, + "unrendered_config": {}, + }, + }, + "metrics": {}, + "groups": { + "group.test.test_group": { + "name": "test_group", + "resource_type": "group", + "original_file_path": os.path.join("models", "schema.yml"), + "owner": {"email": "test_group@test.com", "name": None}, + "package_name": "test", + "path": "schema.yml", + "unique_id": "group.test.test_group", + } + }, + "sources": {}, + "selectors": {}, + "docs": {}, + "child_map": { + "model.test.versioned_model.v1": [ + "model.test.ref_versioned_model", + "test.test.unique_versioned_model_v1_count.0b4c0b688a", + "test.test.unique_versioned_model_v1_first_name.6138195dec", + ], + "model.test.versioned_model.v2": [ + "exposure.test.notebook_exposure", + "model.test.ref_versioned_model", + "test.test.unique_versioned_model_v2_first_name.998430d28e", + ], + "model.test.ref_versioned_model": [], + "exposure.test.notebook_exposure": [], + "test.test.unique_versioned_model_v1_first_name.6138195dec": [], + "test.test.unique_versioned_model_v1_count.0b4c0b688a": [], + "test.test.unique_versioned_model_v2_first_name.998430d28e": [], + }, + "parent_map": { + "model.test.versioned_model.v1": [], + "model.test.versioned_model.v2": [], + "model.test.ref_versioned_model": [ + "model.test.versioned_model.v1", + "model.test.versioned_model.v2", + ], + "exposure.test.notebook_exposure": ["model.test.versioned_model.v2"], + "test.test.unique_versioned_model_v1_first_name.6138195dec": [ + "model.test.versioned_model.v1" + ], + "test.test.unique_versioned_model_v1_count.0b4c0b688a": [ + "model.test.versioned_model.v1" + ], + "test.test.unique_versioned_model_v2_first_name.998430d28e": [ + "model.test.versioned_model.v2" + ], + }, + "group_map": { + "test_group": [ + "model.test.versioned_model.v1", + "model.test.versioned_model.v2", + "test.test.unique_versioned_model_v1_first_name.6138195dec", + "test.test.unique_versioned_model_v1_count.0b4c0b688a", + "test.test.unique_versioned_model_v2_first_name.998430d28e", + ] + }, + "disabled": {}, + "macros": {}, + "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, + } diff --git a/tests/functional/artifacts/expected_run_results.py b/tests/functional/artifacts/expected_run_results.py new file mode 100644 index 000000000..3a3148eba --- /dev/null +++ b/tests/functional/artifacts/expected_run_results.py @@ -0,0 +1,243 @@ +from unittest.mock import ANY + +from dbt.tests.util import AnyFloat + + +def expected_run_results(): + """ + The expected results of this run. + """ + + return [ + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.model", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.second_model", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "seed.test.seed", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": None, + "compiled_code": ANY, + "relation_name": None, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "snapshot.test.snapshot_seed", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "test.test.not_null_model_id.d01cc630e6", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": None, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "test.test.test_nothing_model_.5d38568946", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": None, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "test.test.unique_model_id.67b76558ff", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": None, + }, + ] + + +def expected_references_run_results(): + return [ + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.ephemeral_summary", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.view_summary", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "seed.test.seed", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": None, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "snapshot.test.snapshot_seed", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + ] + + +def expected_versions_run_results(): + return [ + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.ref_versioned_model", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.versioned_model.v1", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "model.test.versioned_model.v2", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "test.test.unique_versioned_model_v1_count.0b4c0b688a", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "test.test.unique_versioned_model_v1_first_name.6138195dec", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + { + "status": "success", + "message": None, + "execution_time": AnyFloat(), + "unique_id": "test.test.unique_versioned_model_v2_first_name.998430d28e", + "adapter_response": ANY, + "thread_id": ANY, + "timing": [ANY, ANY], + "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, + }, + ] diff --git a/tests/functional/artifacts/test_artifact_fields.py b/tests/functional/artifacts/test_artifact_fields.py new file mode 100644 index 000000000..3a2f4a107 --- /dev/null +++ b/tests/functional/artifacts/test_artifact_fields.py @@ -0,0 +1,50 @@ +from dbt.tests.util import get_artifact, get_manifest, run_dbt +import pytest + + +# This is a place to put specific tests for contents of artifacts that we +# don't want to bother putting in the big artifact output test, which is +# hard to update. + + +my_model_sql = "select 1 as fun" + +schema_yml = """ +version: 2 +models: + - name: my_model + columns: + - name: fun + data_tests: + - not_null +""" + + +class TestRelationNameInTests: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "schema.yml": schema_yml, + } + + def test_relation_name_in_tests(self, project): + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + test_id = "test.test.not_null_my_model_fun.bf3b032a01" + assert test_id in manifest.nodes + assert manifest.nodes[test_id].relation_name is None + + results = run_dbt(["test", "--store-failures"]) + assert len(results) == 1 + # The relation_name for tests with previously generated manifest and + # store_failures passed in on the command line, will be in the manifest.json + # but not in the parsed manifest. + manifest = get_manifest(project.project_root) + assert manifest.nodes[test_id].relation_name is None + manifest_json = get_artifact(project.project_root, "target", "manifest.json") + assert test_id in manifest_json["nodes"] + relation_name = manifest_json["nodes"][test_id]["relation_name"] + assert relation_name + assert '"not_null_my_model_fun"' in relation_name diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py new file mode 100644 index 000000000..2fa6be47c --- /dev/null +++ b/tests/functional/artifacts/test_artifacts.py @@ -0,0 +1,710 @@ +from datetime import datetime +import jsonschema +import os + +import dbt +from dbt.artifacts.results import RunStatus +from dbt.artifacts.run import RunResultsArtifact +from dbt.contracts.graph.manifest import WritableManifest +from dbt.tests.util import ( + check_datetime_between, + get_artifact, + run_dbt, + run_dbt_and_capture, +) +import pytest + +from tests.functional.artifacts.expected_manifest import ( + expected_references_manifest, + expected_seeded_manifest, + expected_versions_manifest, +) +from tests.functional.artifacts.expected_run_results import ( + expected_references_run_results, + expected_run_results, + expected_versions_run_results, +) + + +models__schema_yml = """ +version: 2 + +models: + - name: model + description: "The test model" + docs: + show: false + columns: + - name: id + description: The user ID number + data_tests: + - unique + - not_null + - name: first_name + description: The user's first name + - name: email + description: The user's email + - name: ip_address + description: The user's IP address + - name: updated_at + description: The last time this user's email was updated + data_tests: + - test.nothing + + - name: second_model + description: "The second test model" + docs: + show: false + columns: + - name: id + description: The user ID number + - name: first_name + description: The user's first name + - name: email + description: The user's email + - name: ip_address + description: The user's IP address + - name: updated_at + description: The last time this user's email was updated + + +sources: + - name: my_source + description: "My source" + loader: a_loader + schema: "{{ var('test_schema') }}" + tables: + - name: my_table + description: "My table" + identifier: seed + quoting: + identifier: True + columns: + - name: id + description: "An ID field" + + +exposures: + - name: simple_exposure + type: dashboard + depends_on: + - ref('model') + - source('my_source', 'my_table') + owner: + email: something@example.com + - name: notebook_exposure + type: notebook + depends_on: + - ref('model') + - ref('second_model') + owner: + email: something@example.com + name: Some name + description: > + A description of the complex exposure + maturity: medium + meta: + tool: 'my_tool' + languages: + - python + tags: ['my_department'] + url: http://example.com/notebook/1 +""" + +models__second_model_sql = """ +{{ + config( + materialized='view', + schema='test', + ) +}} + +select * from {{ ref('seed') }} +""" + +models__readme_md = """ +This is a readme.md file with {{ invalid-ish jinja }} in it +""" + +models__model_sql = """ +{{ + config( + materialized='view', + ) +}} + +select * from {{ ref('seed') }} +""" + +models__model_with_pre_hook_sql = """ +{{ + config( + pre_hook={ + "sql": "{{ alter_timezone(timezone='Etc/UTC') }}" + } + ) +}} +select current_setting('timezone') as timezone +""" + +seed__schema_yml = """ +version: 2 +seeds: + - name: seed + description: "The test seed" + columns: + - name: id + description: The user ID number + - name: first_name + description: The user's first name + - name: email + description: The user's email + - name: ip_address + description: The user's IP address + - name: updated_at + description: The last time this user's email was updated +""" + +seed__seed_csv = """id,first_name,email,ip_address,updated_at +1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 +""" + +macros__schema_yml = """ +version: 2 +macros: + - name: test_nothing + description: "{{ doc('macro_info') }}" + meta: + some_key: 100 + arguments: + - name: model + type: Relation + description: "{{ doc('macro_arg_info') }}" +""" + +macros__macro_md = """ +{% docs macro_info %} +My custom test that I wrote that does nothing +{% enddocs %} + +{% docs macro_arg_info %} +The model for my custom test +{% enddocs %} +""" + +macros__dummy_test_sql = """ +{% test nothing(model) %} + +-- a silly test to make sure that table-level tests show up in the manifest +-- without a column_name field +select 0 + +{% endtest %} +""" + +macros__alter_timezone_sql = """ +{% macro alter_timezone(timezone='America/Los_Angeles') %} +{% set sql %} + SET TimeZone='{{ timezone }}'; +{% endset %} + +{% do run_query(sql) %} +{% do log("Timezone set to: " + timezone, info=True) %} +{% endmacro %} +""" + +snapshot__snapshot_seed_sql = """ +{% snapshot snapshot_seed %} +{{ + config( + unique_key='id', + strategy='check', + check_cols='all', + target_schema=var('alternate_schema') + ) +}} +select * from {{ ref('seed') }} +{% endsnapshot %} +""" + +ref_models__schema_yml = """ +version: 2 + +groups: + - name: test_group + owner: + email: test_group@test.com + +models: + - name: ephemeral_summary + description: "{{ doc('ephemeral_summary') }}" + config: + group: test_group + columns: &summary_columns + - name: first_name + description: "{{ doc('summary_first_name') }}" + - name: ct + description: "{{ doc('summary_count') }}" + - name: view_summary + description: "{{ doc('view_summary') }}" + columns: *summary_columns + +sources: + - name: my_source + description: "{{ doc('source_info') }}" + loader: a_loader + schema: "{{ var('test_schema') }}" + quoting: + database: False + identifier: False + tables: + - name: my_table + description: "{{ doc('table_info') }}" + identifier: seed + quoting: + identifier: True + columns: + - name: id + description: "{{ doc('column_info') }}" + +exposures: + - name: notebook_exposure + type: notebook + depends_on: + - ref('view_summary') + owner: + email: something@example.com + name: Some name + description: "{{ doc('notebook_info') }}" + maturity: medium + url: http://example.com/notebook/1 + meta: + tool: 'my_tool' + languages: + - python + tags: ['my_department'] + +""" + +ref_models__view_summary_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select first_name, ct from {{ref('ephemeral_summary')}} +order by ct asc + +""" + +ref_models__ephemeral_summary_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select first_name, count(*) as ct from {{ref('ephemeral_copy')}} +group by first_name +order by first_name asc + +""" + +ref_models__ephemeral_copy_sql = """ +{{ + config( + materialized = "ephemeral" + ) +}} + +select * from {{ source("my_source", "my_table") }} + +""" + +ref_models__docs_md = """ +{% docs ephemeral_summary %} +A summmary table of the ephemeral copy of the seed data +{% enddocs %} + +{% docs summary_first_name %} +The first name being summarized +{% enddocs %} + +{% docs summary_count %} +The number of instances of the first name +{% enddocs %} + +{% docs view_summary %} +A view of the summary of the ephemeral copy of the seed data +{% enddocs %} + +{% docs source_info %} +My source +{% enddocs %} + +{% docs table_info %} +My table +{% enddocs %} + +{% docs column_info %} +An ID field +{% enddocs %} + +{% docs notebook_info %} +A description of the complex exposure +{% enddocs %} + +""" + +versioned_models__schema_yml = """ +version: 2 + +groups: + - name: test_group + owner: + email: test_group@test.com + +models: + - name: versioned_model + description: "A versioned model" + latest_version: 2 + config: + group: test_group + materialized: table + meta: + color: blue + size: large + data_tests: + - unique: + column_name: count + columns: + - name: first_name + description: "The first name being summarized" + data_tests: + - unique + - name: ct + description: "The number of instances of the first name" + versions: + - v: 1 + defined_in: arbitrary_file_name + deprecation_date: 2022-07-11 + - v: 2 + config: + materialized: view + meta: + color: red + data_tests: [] + columns: + - include: '*' + exclude: ['ct'] + - name: extra + - name: ref_versioned_model + +exposures: + - name: notebook_exposure + type: notebook + depends_on: + - ref('versioned_model', v=2) + owner: + email: something@example.com + name: Some name + description: "notebook_info" +""" + +versioned_models__v1_sql = """ +select "test first name" as first_name, 1 as ct +""" + +versioned_models__v2_sql = """ +select "test first name" as first_name, 1 as extra +""" + +versioned_models___ref_sql = """ +select first_name from {{ ref("versioned_model", version=2) }} +UNION ALL +select first_name from {{ ref("versioned_model", version="2") }} +UNION ALL +select first_name from {{ ref("versioned_model", v=2) }} +UNION ALL +select first_name from {{ ref("versioned_model") }} +UNION ALL +select first_name from {{ ref("versioned_model", version=1) }} +""" + + +def verify_metadata(metadata, dbt_schema_version, start_time): + assert "generated_at" in metadata + check_datetime_between(metadata["generated_at"], start=start_time) + assert "dbt_version" in metadata + assert metadata["dbt_version"] == dbt.version.__version__ + assert "dbt_schema_version" in metadata + assert metadata["dbt_schema_version"] == dbt_schema_version + key = "env_key" + if os.name == "nt": + key = key.upper() + assert metadata["env"] == {key: "env_value"} + + +def verify_manifest(project, expected_manifest, start_time, manifest_schema_path): + manifest_path = os.path.join(project.project_root, "target", "manifest.json") + assert os.path.exists(manifest_path) + manifest = get_artifact(manifest_path) + # Verify that manifest jsonschema from WritableManifest works + manifest_schema = WritableManifest.json_schema() + validate(manifest_schema, manifest) + + # Verify that stored manifest jsonschema works. + # If this fails, schemas need to be updated with: + # scripts/collect-artifact-schema.py --path schemas --artifact manifest + stored_manifest_schema = get_artifact(manifest_schema_path) + validate(stored_manifest_schema, manifest) + + manifest_keys = { + "nodes", + "sources", + "macros", + "parent_map", + "child_map", + "group_map", + "metrics", + "groups", + "docs", + "metadata", + "docs", + "disabled", + "exposures", + "selectors", + "semantic_models", + "unit_tests", + "saved_queries", + } + + assert set(manifest.keys()) == manifest_keys + + for key in manifest_keys: + if key == "macros": + verify_manifest_macros(manifest, expected_manifest.get("macros")) + elif key == "metadata": + metadata = manifest["metadata"] + dbt_schema_version = str(WritableManifest.dbt_schema_version) + verify_metadata(metadata, dbt_schema_version, start_time) + assert ( + "project_id" in metadata + and metadata["project_id"] == "098f6bcd4621d373cade4e832627b4f6" + ) + assert "project_name" in metadata and metadata["project_name"] == "test" + assert ( + "send_anonymous_usage_stats" in metadata + and metadata["send_anonymous_usage_stats"] is False + ) + assert "adapter_type" in metadata and metadata["adapter_type"] == project.adapter_type + elif key in ["nodes", "sources", "exposures", "metrics", "disabled", "docs"]: + for unique_id, node in expected_manifest[key].items(): + assert unique_id in manifest[key] + assert manifest[key][unique_id] == node, f"{unique_id} did not match" + else: # ['docs', 'parent_map', 'child_map', 'group_map', 'selectors', 'semantic_models', 'saved_queries'] + assert manifest[key] == expected_manifest[key] + + +def verify_manifest_macros(manifest, expected=None): + assert "macros" in manifest + if expected: + for unique_id, expected_macro in expected.items(): + assert unique_id in manifest["macros"] + actual_macro = manifest["macros"][unique_id] + assert expected_macro == actual_macro + + +def verify_run_results(project, expected_run_results, start_time, run_results_schema_path): + run_results_path = os.path.join(project.project_root, "target", "run_results.json") + run_results = get_artifact(run_results_path) + assert "metadata" in run_results + + # Verify that jsonschema for RunResultsArtifact works + run_results_schema = RunResultsArtifact.json_schema() + validate(run_results_schema, run_results) + + # Verify that stored run_results jsonschema works. + # If this fails, schemas need to be updated with: + # scripts/collect-artifact-schema.py --path schemas --artifact run-results + stored_run_results_schema = get_artifact(run_results_schema_path) + validate(stored_run_results_schema, run_results) + + dbt_schema_version = str(RunResultsArtifact.dbt_schema_version) + verify_metadata(run_results["metadata"], dbt_schema_version, start_time) + assert "elapsed_time" in run_results + assert run_results["elapsed_time"] > 0 + assert isinstance(run_results["elapsed_time"], float) + assert "args" in run_results + # sort the results so we can make reasonable assertions + run_results["results"].sort(key=lambda r: r["unique_id"]) + assert run_results["results"] == expected_run_results + assert set(run_results) == {"elapsed_time", "results", "metadata", "args"} + + +class BaseVerifyProject: + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + alternate_schema_name = project.test_schema + "_test" + project.create_test_schema(schema_name=alternate_schema_name) + os.environ["DBT_ENV_CUSTOM_ENV_env_key"] = "env_value" + run_dbt(["seed"]) + yield + del os.environ["DBT_ENV_CUSTOM_ENV_env_key"] + + @pytest.fixture(scope="class") + def seeds(self): + return {"schema.yml": seed__schema_yml, "seed.csv": seed__seed_csv} + + @pytest.fixture(scope="class") + def macros(self): + return { + "schema.yml": macros__schema_yml, + "macro.md": macros__macro_md, + "dummy_test.sql": macros__dummy_test_sql, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot_seed.sql": snapshot__snapshot_seed_sql} + + @pytest.fixture(scope="class") + def project_config_update(self, unique_schema): + alternate_schema = unique_schema + "_test" + return { + "vars": { + "test_schema": unique_schema, + "alternate_schema": alternate_schema, + }, + "seeds": { + "quote_columns": True, + }, + "quoting": {"identifier": False}, + } + + @pytest.fixture(scope="class") + def manifest_schema_path(self, request): + schema_version_paths = WritableManifest.dbt_schema_version.path.split("/") + manifest_schema_path = os.path.join( + request.config.rootdir, "schemas", *schema_version_paths + ) + return manifest_schema_path + + @pytest.fixture(scope="class") + def run_results_schema_path(self, request): + schema_version_paths = RunResultsArtifact.dbt_schema_version.path.split("/") + run_results_schema_path = os.path.join( + request.config.rootdir, "schemas", *schema_version_paths + ) + return run_results_schema_path + + +def validate(artifact_schema, artifact_dict): + validator = jsonschema.Draft7Validator(artifact_schema) + error = next(iter(validator.iter_errors(artifact_dict)), None) + assert error is None + + +class TestVerifyArtifacts(BaseVerifyProject): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "second_model.sql": models__second_model_sql, + "readme.md": models__readme_md, + "model.sql": models__model_sql, + } + + # Test generic "docs generate" command + def test_run_and_generate(self, project, manifest_schema_path, run_results_schema_path): + start_time = datetime.utcnow() + results = run_dbt(["compile"]) + assert len(results) == 7 + verify_manifest( + project, + expected_seeded_manifest(project, quote_model=False), + start_time, + manifest_schema_path, + ) + verify_run_results(project, expected_run_results(), start_time, run_results_schema_path) + + +class TestVerifyArtifactsReferences(BaseVerifyProject): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": ref_models__schema_yml, + "view_summary.sql": ref_models__view_summary_sql, + "ephemeral_summary.sql": ref_models__ephemeral_summary_sql, + "ephemeral_copy.sql": ref_models__ephemeral_copy_sql, + "docs.md": ref_models__docs_md, + } + + def test_references(self, project, manifest_schema_path, run_results_schema_path): + start_time = datetime.utcnow() + results = run_dbt(["compile"]) + assert len(results) == 4 + verify_manifest( + project, expected_references_manifest(project), start_time, manifest_schema_path + ) + verify_run_results( + project, expected_references_run_results(), start_time, run_results_schema_path + ) + + +class TestVerifyArtifactsVersions(BaseVerifyProject): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": versioned_models__schema_yml, + "versioned_model_v2.sql": versioned_models__v2_sql, + "arbitrary_file_name.sql": versioned_models__v1_sql, + "ref_versioned_model.sql": versioned_models___ref_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {} + + @pytest.fixture(scope="class") + def snapshots(self): + return {} + + def test_versions(self, project, manifest_schema_path, run_results_schema_path): + start_time = datetime.utcnow() + results = run_dbt(["compile"]) + assert len(results) == 6 + verify_manifest( + project, expected_versions_manifest(project), start_time, manifest_schema_path + ) + verify_run_results( + project, expected_versions_run_results(), start_time, run_results_schema_path + ) + + +class TestVerifyRunOperation(BaseVerifyProject): + @pytest.fixture(scope="class") + def macros(self): + return {"alter_timezone.sql": macros__alter_timezone_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "model_with_pre_hook.sql": models__model_with_pre_hook_sql, + } + + def test_run_operation(self, project): + results, log_output = run_dbt_and_capture(["run-operation", "alter_timezone"]) + assert len(results) == 1 + assert results[0].status == RunStatus.Success + assert results[0].unique_id == "macro.test.alter_timezone" + assert "Timezone set to: America/Los_Angeles" in log_output + + def test_run_model_with_operation(self, project): + # pre-hooks are not included in run_results since they are an attribute of the node and not a node in their + # own right + results, log_output = run_dbt_and_capture(["run", "--select", "model_with_pre_hook"]) + assert len(results) == 1 + assert results[0].status == RunStatus.Success + assert "Timezone set to: Etc/UTC" in log_output diff --git a/tests/functional/artifacts/test_docs_generate_defer.py b/tests/functional/artifacts/test_docs_generate_defer.py new file mode 100644 index 000000000..0222aa3dc --- /dev/null +++ b/tests/functional/artifacts/test_docs_generate_defer.py @@ -0,0 +1,42 @@ +import os +import shutil + +from dbt.tests.util import run_dbt +import pytest + + +model_sql = """ +select 1 as id +""" + + +class TestDocsGenerateDefer: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + def copy_state(self): + assert not os.path.exists("state") + os.makedirs("state") + shutil.copyfile("target/manifest.json", "state/manifest.json") + + def test_generate_defer( + self, + project, + ): + results = run_dbt(["run"]) + assert len(results) == 1 + + # copy state files + self.copy_state() + + # defer test, it succeeds + catalog = run_dbt(["docs", "generate", "--state", "./state", "--defer"]) + assert catalog.nodes["model.test.model"] + + # Check that catalog validates with jsonschema + catalog_dict = catalog.to_dict() + try: + catalog.validate(catalog_dict) + except Exception: + raise pytest.fail("Catalog validation failed") diff --git a/tests/functional/artifacts/test_override.py b/tests/functional/artifacts/test_override.py new file mode 100644 index 000000000..b97f38198 --- /dev/null +++ b/tests/functional/artifacts/test_override.py @@ -0,0 +1,37 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +model_sql = """ +select 1 as id +""" + + +fail_macros__failure_sql = """ +{% macro get_catalog_relations(information_schema, relations) %} + {% do exceptions.raise_compiler_error('rejected: no catalogs for you') %} +{% endmacro %} + +""" + + +class TestDocsGenerateOverride: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + @pytest.fixture(scope="class") + def macros(self): + return {"failure.sql": fail_macros__failure_sql} + + def test_override_used( + self, + project, + ): + results = run_dbt(["run"]) + assert len(results) == 1 + # this should pick up our failure macro and raise a compilation exception + with pytest.raises(CompilationError) as excinfo: + run_dbt(["--warn-error", "docs", "generate"]) + assert "rejected: no catalogs for you" in str(excinfo.value) diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py new file mode 100644 index 000000000..0a8f1ebf5 --- /dev/null +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -0,0 +1,452 @@ +import json +import os +import shutil + +from dbt.artifacts.base import get_artifact_schema_version +from dbt.artifacts.run import RunResultsArtifact +from dbt.contracts.graph.manifest import WritableManifest +from dbt.exceptions import IncompatibleSchemaError +from dbt.tests.util import get_manifest, run_dbt +import pytest + + +# This project must have one of each kind of node type, plus disabled versions, for +# test coverage to be complete. +models__my_model_sql = """ +select 1 as id +""" + +models__disabled_model_sql = """ +{{ config(enabled=False) }} +select 2 as id +""" + +seeds__my_seed_csv = """ +id,value +4,2 +""" + +seeds__disabled_seed_csv = """ +id,value +6,4 +""" + +docs__somedoc_md = """ +{% docs somedoc %} +Testing, testing +{% enddocs %} +""" + +macros__do_nothing_sql = """ +{% macro do_nothing(foo2, bar2) %} + select + '{{ foo2 }}' as foo2, + '{{ bar2 }}' as bar2 +{% endmacro %} +""" + +macros__dummy_test_sql = """ +{% test check_nothing(model) %} +-- a silly test to make sure that table-level tests show up in the manifest +-- without a column_name field + +select 0 + +{% endtest %} +""" + +macros__disabled_dummy_test_sql = """ +{% test disabled_check_nothing(model) %} +-- a silly test to make sure that table-level tests show up in the manifest +-- without a column_name field + +{{ config(enabled=False) }} +select 0 + +{% endtest %} +""" + +snapshot__snapshot_seed_sql = """ +{% snapshot snapshot_seed %} +{{ + config( + unique_key='id', + strategy='check', + check_cols='all', + target_schema=schema, + ) +}} +select * from {{ ref('my_seed') }} +{% endsnapshot %} +""" + +snapshot__disabled_snapshot_seed_sql = """ +{% snapshot disabled_snapshot_seed %} +{{ + config( + unique_key='id', + strategy='check', + check_cols='all', + target_schema=schema, + enabled=False, + ) +}} +select * from {{ ref('my_seed') }} +{% endsnapshot %} +""" + +tests__just_my_sql = """ +{{ config(tags = ['data_test_tag']) }} + +select * from {{ ref('my_model') }} +where false +""" + +tests__disabled_just_my_sql = """ +{{ config(enabled=False) }} + +select * from {{ ref('my_model') }} +where false +""" + +analyses__a_sql = """ +select 4 as id +""" + +analyses__disabled_a_sql = """ +{{ config(enabled=False) }} +select 9 as id +""" + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" + +# Use old attribute names (v1.0-1.2) to test forward/backward compatibility with the rename in v1.3 +models__schema_yml = """ +version: 2 +models: + - name: my_model + description: "Example model" + data_tests: + - check_nothing + - disabled_check_nothing + columns: + - name: id + data_tests: + - not_null + +semantic_models: + - name: semantic_people + model: ref('my_model') + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + - name: customers + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at + +metrics: + - name: blue_customers_post_2010 + label: Blue Customers since 2010 + type: simple + filter: "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'" + type_params: + measure: + name: customers + filter: "{{ Dimension('id__favorite_color') }} = 'blue'" + - name: customers + label: Customers Metric + type: simple + type_params: + measure: customers + - name: disabled_metric + label: Count records + config: + enabled: False + filter: "{{ Dimension('id__favorite_color') }} = 'blue'" + type: simple + type_params: + measure: customers + - name: ratio_of_blue_customers_to_red_customers + label: Very Important Customer Color Ratio + type: ratio + type_params: + numerator: + name: customers + filter: "{{ Dimension('id__favorite_color')}} = 'blue'" + denominator: + name: customers + filter: "{{ Dimension('id__favorite_color')}} = 'red'" + - name: doubled_blue_customers + type: derived + label: Inflated blue customer numbers + type_params: + expr: 'customers * 2' + metrics: + - name: customers + filter: "{{ Dimension('id__favorite_color')}} = 'blue'" + + +sources: + - name: my_source + description: "My source" + loader: a_loader + tables: + - name: my_table + description: "My table" + identifier: my_seed + - name: disabled_table + description: "Disabled table" + config: + enabled: False + +exposures: + - name: simple_exposure + type: dashboard + depends_on: + - ref('my_model') + - source('my_source', 'my_table') + owner: + email: something@example.com + - name: disabled_exposure + type: dashboard + config: + enabled: False + depends_on: + - ref('my_model') + owner: + email: something@example.com + +seeds: + - name: disabled_seed + config: + enabled: False +""" + +# SETUP: Using this project, we have run past minor versions of dbt +# to generate each contracted version of `manifest.json`. + +# Whenever we bump the manifest version, we should add a new entry for that version +# into `data`, generated from this same project, and update the CURRENT_EXPECTED_MANIFEST_VERSION. +# You can generate the manifest using the generate_latest_manifest() method below. + +# TEST: Then, using the *current* version of dbt (this branch), +# we will perform a `--state` comparison against those older manifests. + +# Some comparisons should succeed, where we expect backward/forward compatibility. + +# Comparisons against older versions should fail, because the structure of the +# WritableManifest class has changed in ways that prevent successful deserialization +# of older JSON manifests. + + +# We are creating enabled versions of every node type that might be in the manifest, +# plus disabled versions for types that support it (everything except macros and docs). + + +class TestPreviousVersionState: + CURRENT_EXPECTED_MANIFEST_VERSION = 12 + CURRENT_EXPECTED_RUN_RESULTS_VERSION = 6 + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": models__my_model_sql, + "schema.yml": models__schema_yml, + "somedoc.md": docs__somedoc_md, + "disabled_model.sql": models__disabled_model_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "my_seed.csv": seeds__my_seed_csv, + "disabled_seed.csv": seeds__disabled_seed_csv, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return { + "snapshot_seed.sql": snapshot__snapshot_seed_sql, + "disabled_snapshot_seed.sql": snapshot__disabled_snapshot_seed_sql, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "just_my.sql": tests__just_my_sql, + "disabled_just_my.sql": tests__disabled_just_my_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "do_nothing.sql": macros__do_nothing_sql, + "dummy_test.sql": macros__dummy_test_sql, + "disabled_dummy_test.sql": macros__disabled_dummy_test_sql, + } + + @pytest.fixture(scope="class") + def analyses(self): + return { + "a.sql": analyses__a_sql, + "disabled_al.sql": analyses__disabled_a_sql, + } + + def test_project(self, project): + # This is mainly used to test changes to the test project in isolation from + # the other noise. + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + # model, snapshot, seed, singular test, generic test, analysis + assert len(manifest.nodes) == 8 + assert len(manifest.sources) == 1 + assert len(manifest.exposures) == 1 + assert len(manifest.metrics) == 4 + # disabled model, snapshot, seed, singular test, generic test, analysis, source, exposure, metric + assert len(manifest.disabled) == 9 + assert "macro.test.do_nothing" in manifest.macros + + # Use this method when generating a new manifest version for the first time. + # Once generated, we shouldn't need to re-generate or modify the manifest. + def generate_latest_manifest( + self, + project, + current_manifest_version, + ): + run_dbt(["parse"]) + source_path = os.path.join(project.project_root, "target/manifest.json") + state_path = os.path.join(project.test_data_dir, f"state/v{current_manifest_version}") + target_path = os.path.join(state_path, "manifest.json") + os.makedirs(state_path, exist_ok=True) + shutil.copyfile(source_path, target_path) + + # Use this method when generating a new run_results version for the first time. + # Once generated, we shouldn't need to re-generate or modify the manifest. + def generate_latest_run_results( + self, + project, + current_run_results_version, + ): + run_dbt(["run"]) + source_path = os.path.join(project.project_root, "target/run_results.json") + state_path = os.path.join(project.test_data_dir, f"results/v{current_run_results_version}") + target_path = os.path.join(state_path, "run_results.json") + os.makedirs(state_path, exist_ok=True) + shutil.copyfile(source_path, target_path) + + # The actual test method. Run `dbt list --select state:modified --state ...` + # once for each past manifest version. They all have the same content, but different + # schema/structure, only some of which are forward-compatible with the + # current WritableManifest class. + def compare_previous_state( + self, + project, + compare_manifest_version, + expect_pass, + num_results, + ): + state_path = os.path.join(project.test_data_dir, f"state/v{compare_manifest_version}") + cli_args = [ + "list", + "--resource-types", + "model", + "--select", + "state:modified", + "--state", + state_path, + ] + if expect_pass: + results = run_dbt(cli_args, expect_pass=expect_pass) + assert len(results) == num_results + else: + with pytest.raises(IncompatibleSchemaError): + run_dbt(cli_args, expect_pass=expect_pass) + + # The actual test method. Run `dbt retry --state ...` + # once for each past run_results version. They all have the same content, but different + # schema/structure, only some of which are forward-compatible with the + # current WritableManifest class. + def compare_previous_results( + self, + project, + compare_run_results_version, + expect_pass, + num_results, + ): + state_path = os.path.join(project.test_data_dir, f"results/v{compare_run_results_version}") + cli_args = [ + "retry", + "--state", + state_path, + ] + if expect_pass: + results = run_dbt(cli_args, expect_pass=expect_pass) + assert len(results) == num_results + else: + with pytest.raises(IncompatibleSchemaError): + run_dbt(cli_args, expect_pass=expect_pass) + + def test_compare_state_current(self, project): + current_manifest_schema_version = WritableManifest.dbt_schema_version.version + assert ( + current_manifest_schema_version == self.CURRENT_EXPECTED_MANIFEST_VERSION + ), "Sounds like you've bumped the manifest version and need to update this test!" + # If we need a newly generated manifest, uncomment the following line and commit the result + # self.generate_latest_manifest(project, current_manifest_schema_version) + self.compare_previous_state(project, current_manifest_schema_version, True, 0) + + def test_backwards_compatible_versions(self, project): + # manifest schema version 4 and greater should always be forward compatible + for schema_version in range(4, 10): + self.compare_previous_state(project, schema_version, True, 1) + for schema_version in range(10, self.CURRENT_EXPECTED_MANIFEST_VERSION): + self.compare_previous_state(project, schema_version, True, 0) + + def test_nonbackwards_compatible_versions(self, project): + # schema versions 1, 2, 3 are all not forward compatible + for schema_version in range(1, 4): + self.compare_previous_state(project, schema_version, False, 0) + + def test_get_manifest_schema_version(self, project): + for schema_version in range(1, self.CURRENT_EXPECTED_MANIFEST_VERSION): + manifest_path = os.path.join( + project.test_data_dir, f"state/v{schema_version}/manifest.json" + ) + manifest = json.load(open(manifest_path)) + + manifest_version = get_artifact_schema_version(manifest) + assert manifest_version == schema_version + + def test_compare_results_current(self, project): + current_run_results_schema_version = RunResultsArtifact.dbt_schema_version.version + assert ( + current_run_results_schema_version == self.CURRENT_EXPECTED_RUN_RESULTS_VERSION + ), "Sounds like you've bumped the run_results version and need to update this test!" + # If we need a newly generated run_results, uncomment the following line and commit the result + # self.generate_latest_run_results(project, current_run_results_schema_version) + self.compare_previous_results(project, current_run_results_schema_version, True, 0) + + def test_backwards_compatible_run_results_versions(self, project): + # run_results schema version 4 and greater should always be forward compatible + for schema_version in range(4, self.CURRENT_EXPECTED_RUN_RESULTS_VERSION): + self.compare_previous_results(project, schema_version, True, 0) diff --git a/tests/functional/artifacts/test_run_results.py b/tests/functional/artifacts/test_run_results.py new file mode 100644 index 000000000..10934ea98 --- /dev/null +++ b/tests/functional/artifacts/test_run_results.py @@ -0,0 +1,74 @@ +import json +from multiprocessing import Process +from pathlib import Path + +from dbt.tests.util import run_dbt +import pytest + + +good_model_sql = """ +select 1 as id +""" + + +bad_model_sql = """ +something bad +""" + + +slow_model_sql = """ +{{ config(materialized='table') }} +select id from {{ ref('good_model') }}, pg_sleep(5) +""" + + +class TestRunResultsTimingSuccess: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": good_model_sql} + + def test_timing_exists(self, project): + results = run_dbt(["run"]) + assert len(results.results) == 1 + assert len(results.results[0].timing) > 0 + + +class TestRunResultsTimingFailure: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": bad_model_sql} + + def test_timing_exists(self, project): + results = run_dbt(["run"], expect_pass=False) + assert len(results.results) == 1 + assert len(results.results[0].timing) > 0 + + +# This test is failing due to the faulty assumptions that run_results.json would +# be written multiple times. Temporarily disabling. +@pytest.mark.skip() +class TestRunResultsWritesFileOnSignal: + @pytest.fixture(scope="class") + def models(self): + return {"good_model.sql": good_model_sql, "slow_model.sql": slow_model_sql} + + def test_run_results_are_written_on_signal(self, project): + # Start the runner in a seperate process. + external_process_dbt = Process( + target=run_dbt, args=([["run"]]), kwargs={"expect_pass": False} + ) + external_process_dbt.start() + assert external_process_dbt.is_alive() + + # Wait until the first file write, then kill the process. + run_results_file = Path(project.project_root) / "target/run_results.json" + while run_results_file.is_file() is False: + pass + external_process_dbt.terminate() + + # Wait until the process is dead, then check the file that there is only one result. + while external_process_dbt.is_alive() is True: + pass + with run_results_file.open() as run_results_str: + run_results = json.loads(run_results_str.read()) + assert len(run_results["results"]) == 1 diff --git a/tests/functional/basic/data/seed-initial.csv b/tests/functional/basic/data/seed-initial.csv new file mode 100644 index 000000000..640af6c4e --- /dev/null +++ b/tests/functional/basic/data/seed-initial.csv @@ -0,0 +1,101 @@ +id,first_name,last_name,email,gender,ip_address +1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 +2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 +3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 +4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 +5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 +6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 +7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 +8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 +9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 +10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 +11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67 +12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193 +13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5 +14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250 +15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245 +16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54 +17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96 +18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72 +19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174 +20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25 +21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253 +22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153 +23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201 +24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122 +25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95 +26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52 +27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26 +28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118 +29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28 +30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177 +31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233 +32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203 +33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149 +34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167 +35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110 +36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68 +37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89 +38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81 +39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15 +40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255 +41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140 +42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24 +43,Sean,Mason,smason16@icq.com,Male,159.219.155.249 +44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218 +45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198 +46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18 +47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238 +48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61 +49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21 +50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209 +51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87 +52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142 +53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126 +54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212 +55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194 +56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22 +57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60 +58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50 +59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222 +60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115 +61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155 +62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94 +63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106 +64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68 +65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41 +66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109 +67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77 +68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194 +69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135 +70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87 +71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44 +72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182 +73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241 +74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24 +75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214 +76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199 +77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41 +78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255 +79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144 +80,Rose,King,rking27@ucoz.com,Female,212.123.168.231 +81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188 +82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61 +83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30 +84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192 +85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232 +86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109 +87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156 +88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84 +89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235 +90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53 +91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221 +92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187 +93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57 +94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189 +95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180 +96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144 +97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117 +98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126 +99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244 +100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88 diff --git a/tests/functional/basic/data/seed-update.csv b/tests/functional/basic/data/seed-update.csv new file mode 100644 index 000000000..5b93306a2 --- /dev/null +++ b/tests/functional/basic/data/seed-update.csv @@ -0,0 +1,201 @@ +id,first_name,last_name,email,gender,ip_address +1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 +2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 +3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 +4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 +5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 +6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 +7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 +8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 +9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 +10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 +11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67 +12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193 +13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5 +14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250 +15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245 +16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54 +17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96 +18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72 +19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174 +20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25 +21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253 +22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153 +23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201 +24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122 +25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95 +26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52 +27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26 +28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118 +29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28 +30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177 +31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233 +32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203 +33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149 +34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167 +35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110 +36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68 +37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89 +38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81 +39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15 +40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255 +41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140 +42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24 +43,Sean,Mason,smason16@icq.com,Male,159.219.155.249 +44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218 +45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198 +46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18 +47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238 +48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61 +49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21 +50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209 +51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87 +52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142 +53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126 +54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212 +55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194 +56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22 +57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60 +58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50 +59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222 +60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115 +61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155 +62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94 +63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106 +64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68 +65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41 +66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109 +67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77 +68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194 +69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135 +70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87 +71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44 +72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182 +73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241 +74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24 +75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214 +76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199 +77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41 +78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255 +79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144 +80,Rose,King,rking27@ucoz.com,Female,212.123.168.231 +81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188 +82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61 +83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30 +84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192 +85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232 +86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109 +87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156 +88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84 +89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235 +90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53 +91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221 +92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187 +93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57 +94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189 +95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180 +96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144 +97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117 +98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126 +99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244 +100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88 +101,Michael,Perez,mperez0@chronoengine.com,Male,106.239.70.175 +102,Shawn,Mccoy,smccoy1@reddit.com,Male,24.165.76.182 +103,Kathleen,Payne,kpayne2@cargocollective.com,Female,113.207.168.106 +104,Jimmy,Cooper,jcooper3@cargocollective.com,Male,198.24.63.114 +105,Katherine,Rice,krice4@typepad.com,Female,36.97.186.238 +106,Sarah,Ryan,sryan5@gnu.org,Female,119.117.152.40 +107,Martin,Mcdonald,mmcdonald6@opera.com,Male,8.76.38.115 +108,Frank,Robinson,frobinson7@wunderground.com,Male,186.14.64.194 +109,Jennifer,Franklin,jfranklin8@mail.ru,Female,91.216.3.131 +110,Henry,Welch,hwelch9@list-manage.com,Male,176.35.182.168 +111,Fred,Snyder,fsnydera@reddit.com,Male,217.106.196.54 +112,Amy,Dunn,adunnb@nba.com,Female,95.39.163.195 +113,Kathleen,Meyer,kmeyerc@cdc.gov,Female,164.142.188.214 +114,Steve,Ferguson,sfergusond@reverbnation.com,Male,138.22.204.251 +115,Teresa,Hill,thille@dion.ne.jp,Female,82.84.228.235 +116,Amanda,Harper,aharperf@mail.ru,Female,16.123.56.176 +117,Kimberly,Ray,krayg@xing.com,Female,48.66.48.12 +118,Johnny,Knight,jknighth@jalbum.net,Male,99.30.138.123 +119,Virginia,Freeman,vfreemani@tiny.cc,Female,225.172.182.63 +120,Anna,Austin,aaustinj@diigo.com,Female,62.111.227.148 +121,Willie,Hill,whillk@mail.ru,Male,0.86.232.249 +122,Sean,Harris,sharrisl@zdnet.com,Male,117.165.133.249 +123,Mildred,Adams,madamsm@usatoday.com,Female,163.44.97.46 +124,David,Graham,dgrahamn@zimbio.com,Male,78.13.246.202 +125,Victor,Hunter,vhuntero@ehow.com,Male,64.156.179.139 +126,Aaron,Ruiz,aruizp@weebly.com,Male,34.194.68.78 +127,Benjamin,Brooks,bbrooksq@jalbum.net,Male,20.192.189.107 +128,Lisa,Wilson,lwilsonr@japanpost.jp,Female,199.152.130.217 +129,Benjamin,King,bkings@comsenz.com,Male,29.189.189.213 +130,Christina,Williamson,cwilliamsont@boston.com,Female,194.101.52.60 +131,Jane,Gonzalez,jgonzalezu@networksolutions.com,Female,109.119.12.87 +132,Thomas,Owens,towensv@psu.edu,Male,84.168.213.153 +133,Katherine,Moore,kmoorew@naver.com,Female,183.150.65.24 +134,Jennifer,Stewart,jstewartx@yahoo.com,Female,38.41.244.58 +135,Sara,Tucker,stuckery@topsy.com,Female,181.130.59.184 +136,Harold,Ortiz,hortizz@vkontakte.ru,Male,198.231.63.137 +137,Shirley,James,sjames10@yelp.com,Female,83.27.160.104 +138,Dennis,Johnson,djohnson11@slate.com,Male,183.178.246.101 +139,Louise,Weaver,lweaver12@china.com.cn,Female,1.14.110.18 +140,Maria,Armstrong,marmstrong13@prweb.com,Female,181.142.1.249 +141,Gloria,Cruz,gcruz14@odnoklassniki.ru,Female,178.232.140.243 +142,Diana,Spencer,dspencer15@ifeng.com,Female,125.153.138.244 +143,Kelly,Nguyen,knguyen16@altervista.org,Female,170.13.201.119 +144,Jane,Rodriguez,jrodriguez17@biblegateway.com,Female,12.102.249.81 +145,Scott,Brown,sbrown18@geocities.jp,Male,108.174.99.192 +146,Norma,Cruz,ncruz19@si.edu,Female,201.112.156.197 +147,Marie,Peters,mpeters1a@mlb.com,Female,231.121.197.144 +148,Lillian,Carr,lcarr1b@typepad.com,Female,206.179.164.163 +149,Judy,Nichols,jnichols1c@t-online.de,Female,158.190.209.194 +150,Billy,Long,blong1d@yahoo.com,Male,175.20.23.160 +151,Howard,Reid,hreid1e@exblog.jp,Male,118.99.196.20 +152,Laura,Ferguson,lferguson1f@tuttocitta.it,Female,22.77.87.110 +153,Anne,Bailey,abailey1g@geocities.com,Female,58.144.159.245 +154,Rose,Morgan,rmorgan1h@ehow.com,Female,118.127.97.4 +155,Nicholas,Reyes,nreyes1i@google.ru,Male,50.135.10.252 +156,Joshua,Kennedy,jkennedy1j@house.gov,Male,154.6.163.209 +157,Paul,Watkins,pwatkins1k@upenn.edu,Male,177.236.120.87 +158,Kathryn,Kelly,kkelly1l@businessweek.com,Female,70.28.61.86 +159,Adam,Armstrong,aarmstrong1m@techcrunch.com,Male,133.235.24.202 +160,Norma,Wallace,nwallace1n@phoca.cz,Female,241.119.227.128 +161,Timothy,Reyes,treyes1o@google.cn,Male,86.28.23.26 +162,Elizabeth,Patterson,epatterson1p@sun.com,Female,139.97.159.149 +163,Edward,Gomez,egomez1q@google.fr,Male,158.103.108.255 +164,David,Cox,dcox1r@friendfeed.com,Male,206.80.80.58 +165,Brenda,Wood,bwood1s@over-blog.com,Female,217.207.44.179 +166,Adam,Walker,awalker1t@blogs.com,Male,253.211.54.93 +167,Michael,Hart,mhart1u@wix.com,Male,230.206.200.22 +168,Jesse,Ellis,jellis1v@google.co.uk,Male,213.254.162.52 +169,Janet,Powell,jpowell1w@un.org,Female,27.192.194.86 +170,Helen,Ford,hford1x@creativecommons.org,Female,52.160.102.168 +171,Gerald,Carpenter,gcarpenter1y@about.me,Male,36.30.194.218 +172,Kathryn,Oliver,koliver1z@army.mil,Female,202.63.103.69 +173,Alan,Berry,aberry20@gov.uk,Male,246.157.112.211 +174,Harry,Andrews,handrews21@ameblo.jp,Male,195.108.0.12 +175,Andrea,Hall,ahall22@hp.com,Female,149.162.163.28 +176,Barbara,Wells,bwells23@behance.net,Female,224.70.72.1 +177,Anne,Wells,awells24@apache.org,Female,180.168.81.153 +178,Harry,Harper,hharper25@rediff.com,Male,151.87.130.21 +179,Jack,Ray,jray26@wufoo.com,Male,220.109.38.178 +180,Phillip,Hamilton,phamilton27@joomla.org,Male,166.40.47.30 +181,Shirley,Hunter,shunter28@newsvine.com,Female,97.209.140.194 +182,Arthur,Daniels,adaniels29@reuters.com,Male,5.40.240.86 +183,Virginia,Rodriguez,vrodriguez2a@walmart.com,Female,96.80.164.184 +184,Christina,Ryan,cryan2b@hibu.com,Female,56.35.5.52 +185,Theresa,Mendoza,tmendoza2c@vinaora.com,Female,243.42.0.210 +186,Jason,Cole,jcole2d@ycombinator.com,Male,198.248.39.129 +187,Phillip,Bryant,pbryant2e@rediff.com,Male,140.39.116.251 +188,Adam,Torres,atorres2f@sun.com,Male,101.75.187.135 +189,Margaret,Johnston,mjohnston2g@ucsd.edu,Female,159.30.69.149 +190,Paul,Payne,ppayne2h@hhs.gov,Male,199.234.140.220 +191,Todd,Willis,twillis2i@businessweek.com,Male,191.59.136.214 +192,Willie,Oliver,woliver2j@noaa.gov,Male,44.212.35.197 +193,Frances,Robertson,frobertson2k@go.com,Female,31.117.65.136 +194,Gregory,Hawkins,ghawkins2l@joomla.org,Male,91.3.22.49 +195,Lisa,Perkins,lperkins2m@si.edu,Female,145.95.31.186 +196,Jacqueline,Anderson,janderson2n@cargocollective.com,Female,14.176.0.187 +197,Shirley,Diaz,sdiaz2o@ucla.edu,Female,207.12.95.46 +198,Nicole,Meyer,nmeyer2p@flickr.com,Female,231.79.115.13 +199,Mary,Gray,mgray2q@constantcontact.com,Female,210.116.64.253 +200,Jean,Mcdonald,jmcdonald2r@baidu.com,Female,122.239.235.117 diff --git a/tests/functional/basic/data/summary_expected.csv b/tests/functional/basic/data/summary_expected.csv new file mode 100644 index 000000000..0d938030d --- /dev/null +++ b/tests/functional/basic/data/summary_expected.csv @@ -0,0 +1,3 @@ +gender,ct +Female,40 +Male,60 diff --git a/tests/functional/basic/data/summary_expected_update.csv b/tests/functional/basic/data/summary_expected_update.csv new file mode 100644 index 000000000..1dd7590a8 --- /dev/null +++ b/tests/functional/basic/data/summary_expected_update.csv @@ -0,0 +1,3 @@ +gender,ct +Female,94 +Male,106 diff --git a/tests/functional/basic/data/varchar10_seed.sql b/tests/functional/basic/data/varchar10_seed.sql new file mode 100644 index 000000000..361869620 --- /dev/null +++ b/tests/functional/basic/data/varchar10_seed.sql @@ -0,0 +1,110 @@ +create table {schema}.seed ( + id BIGSERIAL PRIMARY KEY, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + "GenDEr" VARCHAR(10), + ip_address VARCHAR(20) +); + +insert into {schema}.seed (first_name, last_name, email, "GenDEr", ip_address) values +('Jack', 'Hunter', 'jhunter0@pbs.org', 'Male', '59.80.20.168'), +('Kathryn', 'Walker', 'kwalker1@ezinearticles.com', 'Female', '194.121.179.35'), +('Gerald', 'Ryan', 'gryan2@com.com', 'Male', '11.3.212.243'), +('Bonnie', 'Spencer', 'bspencer3@ameblo.jp', 'Female', '216.32.196.175'), +('Harold', 'Taylor', 'htaylor4@people.com.cn', 'Male', '253.10.246.136'), +('Jacqueline', 'Griffin', 'jgriffin5@t.co', 'Female', '16.13.192.220'), +('Wanda', 'Arnold', 'warnold6@google.nl', 'Female', '232.116.150.64'), +('Craig', 'Ortiz', 'cortiz7@sciencedaily.com', 'Male', '199.126.106.13'), +('Gary', 'Day', 'gday8@nih.gov', 'Male', '35.81.68.186'), +('Rose', 'Wright', 'rwright9@yahoo.co.jp', 'Female', '236.82.178.100'), +('Raymond', 'Kelley', 'rkelleya@fc2.com', 'Male', '213.65.166.67'), +('Gerald', 'Robinson', 'grobinsonb@disqus.com', 'Male', '72.232.194.193'), +('Mildred', 'Martinez', 'mmartinezc@samsung.com', 'Female', '198.29.112.5'), +('Dennis', 'Arnold', 'darnoldd@google.com', 'Male', '86.96.3.250'), +('Judy', 'Gray', 'jgraye@opensource.org', 'Female', '79.218.162.245'), +('Theresa', 'Garza', 'tgarzaf@epa.gov', 'Female', '21.59.100.54'), +('Gerald', 'Robertson', 'grobertsong@csmonitor.com', 'Male', '131.134.82.96'), +('Philip', 'Hernandez', 'phernandezh@adobe.com', 'Male', '254.196.137.72'), +('Julia', 'Gonzalez', 'jgonzalezi@cam.ac.uk', 'Female', '84.240.227.174'), +('Andrew', 'Davis', 'adavisj@patch.com', 'Male', '9.255.67.25'), +('Kimberly', 'Harper', 'kharperk@foxnews.com', 'Female', '198.208.120.253'), +('Mark', 'Martin', 'mmartinl@marketwatch.com', 'Male', '233.138.182.153'), +('Cynthia', 'Ruiz', 'cruizm@google.fr', 'Female', '18.178.187.201'), +('Samuel', 'Carroll', 'scarrolln@youtu.be', 'Male', '128.113.96.122'), +('Jennifer', 'Larson', 'jlarsono@vinaora.com', 'Female', '98.234.85.95'), +('Ashley', 'Perry', 'aperryp@rakuten.co.jp', 'Female', '247.173.114.52'), +('Howard', 'Rodriguez', 'hrodriguezq@shutterfly.com', 'Male', '231.188.95.26'), +('Amy', 'Brooks', 'abrooksr@theatlantic.com', 'Female', '141.199.174.118'), +('Louise', 'Warren', 'lwarrens@adobe.com', 'Female', '96.105.158.28'), +('Tina', 'Watson', 'twatsont@myspace.com', 'Female', '251.142.118.177'), +('Janice', 'Kelley', 'jkelleyu@creativecommons.org', 'Female', '239.167.34.233'), +('Terry', 'Mccoy', 'tmccoyv@bravesites.com', 'Male', '117.201.183.203'), +('Jeffrey', 'Morgan', 'jmorganw@surveymonkey.com', 'Male', '78.101.78.149'), +('Louis', 'Harvey', 'lharveyx@sina.com.cn', 'Male', '51.50.0.167'), +('Philip', 'Miller', 'pmillery@samsung.com', 'Male', '103.255.222.110'), +('Willie', 'Marshall', 'wmarshallz@ow.ly', 'Male', '149.219.91.68'), +('Patrick', 'Lopez', 'plopez10@redcross.org', 'Male', '250.136.229.89'), +('Adam', 'Jenkins', 'ajenkins11@harvard.edu', 'Male', '7.36.112.81'), +('Benjamin', 'Cruz', 'bcruz12@linkedin.com', 'Male', '32.38.98.15'), +('Ruby', 'Hawkins', 'rhawkins13@gmpg.org', 'Female', '135.171.129.255'), +('Carlos', 'Barnes', 'cbarnes14@a8.net', 'Male', '240.197.85.140'), +('Ruby', 'Griffin', 'rgriffin15@bravesites.com', 'Female', '19.29.135.24'), +('Sean', 'Mason', 'smason16@icq.com', 'Male', '159.219.155.249'), +('Anthony', 'Payne', 'apayne17@utexas.edu', 'Male', '235.168.199.218'), +('Steve', 'Cruz', 'scruz18@pcworld.com', 'Male', '238.201.81.198'), +('Anthony', 'Garcia', 'agarcia19@flavors.me', 'Male', '25.85.10.18'), +('Doris', 'Lopez', 'dlopez1a@sphinn.com', 'Female', '245.218.51.238'), +('Susan', 'Nichols', 'snichols1b@freewebs.com', 'Female', '199.99.9.61'), +('Wanda', 'Ferguson', 'wferguson1c@yahoo.co.jp', 'Female', '236.241.135.21'), +('Andrea', 'Pierce', 'apierce1d@google.co.uk', 'Female', '132.40.10.209'), +('Lawrence', 'Phillips', 'lphillips1e@jugem.jp', 'Male', '72.226.82.87'), +('Judy', 'Gilbert', 'jgilbert1f@multiply.com', 'Female', '196.250.15.142'), +('Eric', 'Williams', 'ewilliams1g@joomla.org', 'Male', '222.202.73.126'), +('Ralph', 'Romero', 'rromero1h@sogou.com', 'Male', '123.184.125.212'), +('Jean', 'Wilson', 'jwilson1i@ocn.ne.jp', 'Female', '176.106.32.194'), +('Lori', 'Reynolds', 'lreynolds1j@illinois.edu', 'Female', '114.181.203.22'), +('Donald', 'Moreno', 'dmoreno1k@bbc.co.uk', 'Male', '233.249.97.60'), +('Steven', 'Berry', 'sberry1l@eepurl.com', 'Male', '186.193.50.50'), +('Theresa', 'Shaw', 'tshaw1m@people.com.cn', 'Female', '120.37.71.222'), +('John', 'Stephens', 'jstephens1n@nationalgeographic.com', 'Male', '191.87.127.115'), +('Richard', 'Jacobs', 'rjacobs1o@state.tx.us', 'Male', '66.210.83.155'), +('Andrew', 'Lawson', 'alawson1p@over-blog.com', 'Male', '54.98.36.94'), +('Peter', 'Morgan', 'pmorgan1q@rambler.ru', 'Male', '14.77.29.106'), +('Nicole', 'Garrett', 'ngarrett1r@zimbio.com', 'Female', '21.127.74.68'), +('Joshua', 'Kim', 'jkim1s@edublogs.org', 'Male', '57.255.207.41'), +('Ralph', 'Roberts', 'rroberts1t@people.com.cn', 'Male', '222.143.131.109'), +('George', 'Montgomery', 'gmontgomery1u@smugmug.com', 'Male', '76.75.111.77'), +('Gerald', 'Alvarez', 'galvarez1v@flavors.me', 'Male', '58.157.186.194'), +('Donald', 'Olson', 'dolson1w@whitehouse.gov', 'Male', '69.65.74.135'), +('Carlos', 'Morgan', 'cmorgan1x@pbs.org', 'Male', '96.20.140.87'), +('Aaron', 'Stanley', 'astanley1y@webnode.com', 'Male', '163.119.217.44'), +('Virginia', 'Long', 'vlong1z@spiegel.de', 'Female', '204.150.194.182'), +('Robert', 'Berry', 'rberry20@tripadvisor.com', 'Male', '104.19.48.241'), +('Antonio', 'Brooks', 'abrooks21@unesco.org', 'Male', '210.31.7.24'), +('Ruby', 'Garcia', 'rgarcia22@ovh.net', 'Female', '233.218.162.214'), +('Jack', 'Hanson', 'jhanson23@blogtalkradio.com', 'Male', '31.55.46.199'), +('Kathryn', 'Nelson', 'knelson24@walmart.com', 'Female', '14.189.146.41'), +('Jason', 'Reed', 'jreed25@printfriendly.com', 'Male', '141.189.89.255'), +('George', 'Coleman', 'gcoleman26@people.com.cn', 'Male', '81.189.221.144'), +('Rose', 'King', 'rking27@ucoz.com', 'Female', '212.123.168.231'), +('Johnny', 'Holmes', 'jholmes28@boston.com', 'Male', '177.3.93.188'), +('Katherine', 'Gilbert', 'kgilbert29@altervista.org', 'Female', '199.215.169.61'), +('Joshua', 'Thomas', 'jthomas2a@ustream.tv', 'Male', '0.8.205.30'), +('Julie', 'Perry', 'jperry2b@opensource.org', 'Female', '60.116.114.192'), +('Richard', 'Perry', 'rperry2c@oracle.com', 'Male', '181.125.70.232'), +('Kenneth', 'Ruiz', 'kruiz2d@wikimedia.org', 'Male', '189.105.137.109'), +('Jose', 'Morgan', 'jmorgan2e@webnode.com', 'Male', '101.134.215.156'), +('Donald', 'Campbell', 'dcampbell2f@goo.ne.jp', 'Male', '102.120.215.84'), +('Debra', 'Collins', 'dcollins2g@uol.com.br', 'Female', '90.13.153.235'), +('Jesse', 'Johnson', 'jjohnson2h@stumbleupon.com', 'Male', '225.178.125.53'), +('Elizabeth', 'Stone', 'estone2i@histats.com', 'Female', '123.184.126.221'), +('Angela', 'Rogers', 'arogers2j@goodreads.com', 'Female', '98.104.132.187'), +('Emily', 'Dixon', 'edixon2k@mlb.com', 'Female', '39.190.75.57'), +('Albert', 'Scott', 'ascott2l@tinypic.com', 'Male', '40.209.13.189'), +('Barbara', 'Peterson', 'bpeterson2m@ow.ly', 'Female', '75.249.136.180'), +('Adam', 'Greene', 'agreene2n@fastcompany.com', 'Male', '184.173.109.144'), +('Earl', 'Sanders', 'esanders2o@hc360.com', 'Male', '247.34.90.117'), +('Angela', 'Brooks', 'abrooks2p@mtv.com', 'Female', '10.63.249.126'), +('Harold', 'Foster', 'hfoster2q@privacy.gov.au', 'Male', '139.214.40.244'), +('Carl', 'Meyer', 'cmeyer2r@disqus.com', 'Male', '204.117.7.88'); diff --git a/tests/functional/basic/data/varchar300_seed.sql b/tests/functional/basic/data/varchar300_seed.sql new file mode 100644 index 000000000..e4e5dffdf --- /dev/null +++ b/tests/functional/basic/data/varchar300_seed.sql @@ -0,0 +1,103 @@ +ALTER TABLE {schema}.seed ALTER COLUMN "GenDEr" TYPE varchar(300); + +insert into {schema}.seed (first_name, last_name, email, "GenDEr", ip_address) values +('Annie', 'Reynolds', 'areynolds0@nifty.com', 'Amerisource Bergen', '133.30.242.211'), +('Doris', 'Wood', 'dwood1@skyrock.com', 'Bliss World, LLC', '128.229.89.207'), +('Andrea', 'Ray', 'aray2@google.co.jp', 'Nelco Laboratories, Inc.', '109.74.153.45'), +('Frank', 'Morgan', 'fmorgan3@1688.com', 'ALK-Abello, Inc.', '252.211.209.9'), +('Angela', 'Stanley', 'astanley4@google.fr', 'Gemini Pharmaceuticals, Inc. dba ONDRA Pharmaceuticals', '134.142.194.184'), +('Ruby', 'Jordan', 'rjordan5@nymag.com', 'Watson Pharma, Inc.', '195.104.60.172'), +('Kathleen', 'Ryan', 'kryan6@scientificamerican.com', 'SHISEIDO AMERICAS CORPORATION', '209.110.160.192'), +('Margaret', 'Jacobs', 'mjacobs7@example.com', 'Cardinal Health', '72.36.52.20'), +('Ernest', 'Brown', 'ebrown8@360.cn', 'West-ward Pharmaceutical Corp', '138.157.61.255'), +('Elizabeth', 'Phillips', 'ephillips9@japanpost.jp', 'Cellex-C International Inc', '68.46.195.188'), +('Annie', 'Ellis', 'aellisa@weather.com', 'NATURE REPUBLIC CO., LTD.', '163.128.214.142'), +('Melissa', 'Olson', 'molsonb@theguardian.com', 'Nelco Laboratories, Inc.', '202.22.153.188'), +('Timothy', 'Martinez', 'tmartinezc@zimbio.com', 'Lake Erie Medical & Surgical Supply DBA Quality Care Products LLC', '45.64.205.47'), +('Mark', 'Nelson', 'mnelsond@bloomberg.com', '7-Eleven', '91.99.195.160'), +('Kenneth', 'Hart', 'kharte@berkeley.edu', 'Preferred Pharmaceuticals, Inc.', '207.240.9.102'), +('Kathryn', 'White', 'kwhitef@csmonitor.com', 'Cantrell Drug Company', '191.178.162.18'), +('Mary', 'Greene', 'mgreeneg@usnews.com', 'Neutrogena Corporation', '251.226.65.64'), +('Bruce', 'Peters', 'bpetersh@blogspot.com', 'Sun & Skin Care Research, LLC', '153.227.91.121'), +('Albert', 'Armstrong', 'aarmstrongi@weather.com', 'Access Business Group LLC', '199.146.159.228'), +('Beverly', 'Gray', 'bgrayj@spiegel.de', 'Church & Dwight Co., Inc.', '47.3.135.226'), +('Catherine', 'Taylor', 'ctaylork@walmart.com', 'Matrixx Initiatives, Inc.', '82.24.129.147'), +('Paula', 'Bradley', 'pbradleyl@edublogs.org', 'Nash-Finch Company', '14.145.193.163'), +('Terry', 'Campbell', 'tcampbellm@artisteer.com', 'MedVantx, Inc.', '89.181.95.177'), +('Bruce', 'Stevens', 'bstevensn@ucla.edu', 'Global Pharmaceuticals', '128.81.126.144'), +('Ruby', 'Bishop', 'rbishopo@telegraph.co.uk', 'General Injectables & Vaccines, Inc.', '191.191.17.173'), +('Denise', 'Duncan', 'dduncanp@reference.com', 'Bare Escentuals Beauty, Inc.', '150.207.3.163'), +('Dennis', 'Perkins', 'dperkinsq@1und1.de', 'Altaire Pharmaceuticals Inc.', '21.150.103.133'), +('Brandon', 'Ray', 'brayr@psu.edu', 'Meijer Distribution Inc', '216.53.187.191'), +('Ernest', 'Graham', 'egrahams@tinyurl.com', 'BioComp Pharma, Inc.', '49.85.236.162'), +('Denise', 'Matthews', 'dmatthewst@digg.com', 'Procter & Gamble Manufacturing Co.', '160.4.119.137'), +('Randy', 'Alexander', 'ralexanderu@goo.gl', 'Reckitt Benckiser Pharmaceuticals Inc', '211.72.176.12'), +('Aaron', 'Jackson', 'ajacksonv@gizmodo.com', 'Molton Brown LTD (UK)', '226.178.48.73'), +('Wanda', 'Turner', 'wturnerw@reverbnation.com', 'American Health Packaging', '43.22.122.56'), +('Stephen', 'Ferguson', 'sfergusonx@kickstarter.com', 'Amneal Pharmaceuticals of New York, LLC', '110.211.112.233'), +('Jane', 'Bradley', 'jbradleyy@usgs.gov', 'Kroger Company', '186.153.255.125'), +('Phillip', 'Wood', 'pwoodz@about.com', 'Unit Dose Services', '112.65.6.93'), +('Jeffrey', 'Howell', 'jhowell10@symantec.com', 'Midlothian Laboratories', '232.92.208.248'), +('Howard', 'Harvey', 'hharvey11@nhs.uk', 'Novartis Pharmaceuticals Corporation', '50.212.26.218'), +('Benjamin', 'Johnston', 'bjohnston12@diigo.com', 'Nelco Laboratories, Inc.', '131.109.13.9'), +('Ernest', 'Burke', 'eburke13@toplist.cz', 'Apotex Corp.', '151.176.178.175'), +('Joe', 'Wright', 'jwright14@mapy.cz', 'MULTALER & CIE S.A.', '233.55.33.63'), +('Ronald', 'Griffin', 'rgriffin15@topsy.com', 'Gavis Pharmaceuticals, LLC', '174.233.67.86'), +('Susan', 'Oliver', 'soliver16@goo.gl', 'Bath & Body Works, Inc.', '104.171.43.12'), +('Karen', 'Cox', 'kcox17@hp.com', 'Home Sweet Homeopathics', '225.51.182.192'), +('Antonio', 'Larson', 'alarson18@gov.uk', 'Eight and Company', '243.118.98.188'), +('Brandon', 'Cook', 'bcook19@mozilla.com', 'Chain Drug Consortium, LLC', '38.64.44.255'), +('Gary', 'Gray', 'ggray1a@alexa.com', 'Lil'' Drug Store Products, Inc', '43.34.161.60'), +('Doris', 'Harrison', 'dharrison1b@wiley.com', 'Dispensing Solutions, Inc.', '153.66.74.140'), +('Clarence', 'Perry', 'cperry1c@issuu.com', 'Nelco Laboratories, Inc.', '14.72.110.59'), +('Emily', 'George', 'egeorge1d@blogtalkradio.com', 'State of Florida DOH Central Pharmacy', '148.35.114.224'), +('Dennis', 'Larson', 'dlarson1e@trellian.com', 'G&W Laboratories, Inc.', '134.158.117.11'), +('Ashley', 'Peters', 'apeters1f@de.vu', 'Mylan Pharmaceuticals Inc.', '50.193.252.146'), +('Douglas', 'Andrews', 'dandrews1g@mac.com', 'Jubilant HollisterStier LLC', '159.134.237.86'), +('Craig', 'Dunn', 'cdunn1h@cornell.edu', 'Antigen Laboratories, Inc.', '227.11.100.112'), +('Heather', 'Black', 'hblack1i@harvard.edu', 'Hospira, Inc.', '61.9.121.22'), +('Shirley', 'Ruiz', 'sruiz1j@tmall.com', 'Hankuk Bowonbio Co., Ltd', '171.144.250.254'), +('Carl', 'Martinez', 'cmartinez1k@geocities.jp', 'ALK-Abello, Inc.', '128.216.69.116'), +('Stephen', 'Anderson', 'sanderson1l@odnoklassniki.ru', 'Cardinal Health', '145.154.63.186'), +('Diana', 'Payne', 'dpayne1m@ftc.gov', 'Pharmaceutical Associates, Inc.', '98.9.155.136'), +('Judy', 'Gonzalez', 'jgonzalez1n@walmart.com', 'SHISEIDO CO., LTD.', '73.96.109.149'), +('Steve', 'Cole', 'scole1o@flickr.com', 'Walgreen Company', '251.244.20.117'), +('Johnny', 'Ellis', 'jellis1p@time.com', 'Jubilant HollisterStier LLC', '188.153.76.182'), +('Andrea', 'Hamilton', 'ahamilton1q@dailymail.co.uk', 'ALK-Abello, Inc.', '229.58.149.141'), +('Sean', 'Kennedy', 'skennedy1r@nifty.com', 'Newton Laboratories, Inc.', '227.105.251.134'), +('Sara', 'Grant', 'sgrant1s@flickr.com', 'Rubbermaid Commercial Products LLC', '96.211.162.73'), +('Joan', 'Bennett', 'jbennett1t@forbes.com', 'Nelco Laboratories, Inc.', '143.27.240.163'), +('Judith', 'Daniels', 'jdaniels1u@theguardian.com', 'Newton Laboratories, Inc.', '164.99.249.153'), +('Irene', 'Bennett', 'ibennett1v@comsenz.com', 'Cellab Co., Ltd.', '112.104.12.122'), +('Katherine', 'Perez', 'kperez1w@phpbb.com', 'Temple Industrial Welding Supply Co', '211.31.214.131'), +('Jean', 'Kim', 'jkim1x@umich.edu', 'Bryant Ranch Prepack', '245.252.150.110'), +('Walter', 'Hernandez', 'whernandez1y@nbcnews.com', 'Virtus Pharmaceuticals LLC', '200.201.83.21'), +('Larry', 'Scott', 'lscott1z@quantcast.com', 'BIOKEY INC.', '122.141.109.98'), +('Gerald', 'Palmer', 'gpalmer20@usgs.gov', 'JAFRA COSMETICS INTERNATIONAL', '60.173.159.145'), +('Harry', 'Andrews', 'handrews21@alexa.com', 'NCS HealthCare of KY, Inc dba Vangard Labs', '210.64.37.91'), +('Jerry', 'Morrison', 'jmorrison22@drupal.org', 'Teva Pharmaceuticals USA Inc', '83.190.174.61'), +('Irene', 'Diaz', 'idiaz23@joomla.org', 'Dolgencorp, LLC', '214.16.44.235'), +('Brenda', 'Hansen', 'bhansen24@wisc.edu', 'REMEDYREPACK INC.', '167.231.200.232'), +('Carlos', 'Williamson', 'cwilliamson25@w3.org', 'Kroger Company', '251.202.210.204'), +('David', 'Fuller', 'dfuller26@canalblog.com', 'Supervalu Inc', '175.125.205.131'), +('Norma', 'Bishop', 'nbishop27@jugem.jp', 'Mylan Institutional Inc.', '208.162.25.149'), +('Brenda', 'Daniels', 'bdaniels28@mediafire.com', 'Space Brands Limited', '92.235.250.138'), +('Kathy', 'Reed', 'kreed29@prweb.com', 'Rugby Laboratories Inc.', '182.114.174.63'), +('Anthony', 'Long', 'along2a@dropbox.com', 'Fresenius Kabi USA, LLC', '160.146.121.173'), +('Craig', 'Palmer', 'cpalmer2b@desdev.cn', 'Bio-Pharm, Inc.', '135.77.134.24'), +('Rachel', 'Banks', 'rbanks2c@devhub.com', 'Sam''s West Inc', '35.72.5.193'), +('Kenneth', 'Peters', 'kpeters2d@ocn.ne.jp', 'International Labs, Inc.', '11.38.191.65'), +('Susan', 'Clark', 'sclark2e@ed.gov', 'Shionogi Inc.', '19.243.67.80'), +('Walter', 'Sullivan', 'wsullivan2f@vinaora.com', 'STAT Rx USA LLC', '154.137.170.227'), +('Kathleen', 'Wood', 'kwood2g@salon.com', 'Freds Inc', '155.54.131.149'), +('Phyllis', 'Henderson', 'phenderson2h@walmart.com', 'REMEDYREPACK INC.', '146.65.150.251'), +('Cheryl', 'Wells', 'cwells2i@gov.uk', 'Rebel Distributors Corp', '69.127.148.31'), +('Rose', 'Bradley', 'rbradley2j@un.org', 'Hi-Tech Pharmacal Co., Inc.', '150.101.165.102'), +('Aaron', 'Moreno', 'amoreno2k@tinypic.com', 'Pharmacia and Upjohn Company', '50.27.226.40'), +('Amy', 'Campbell', 'acampbell2l@auda.org.au', 'Chi Research, Inc.', '242.64.63.241'), +('Rebecca', 'Butler', 'rbutler2m@godaddy.com', 'Cardinal Health', '40.55.159.66'), +('Justin', 'Rodriguez', 'jrodriguez2n@meetup.com', 'Hikma Pharmaceutical', '118.9.132.156'), +('Donald', 'Nelson', 'dnelson2o@narod.ru', 'Nature''s Way Products, Inc.', '165.174.28.134'), +('Edward', 'Lawson', 'elawson2p@addtoany.com', 'Apotheca Company', '135.17.238.170'), +('Paul', 'Bell', 'pbell2q@simplemachines.org', 'Washington Homeopathic Products', '235.149.137.62'), +('Mark', 'Rose', 'mrose2r@google.pl', 'AMERICAN SALES COMPANY', '164.108.170.187'); diff --git a/tests/functional/basic/test_basic.py b/tests/functional/basic/test_basic.py new file mode 100644 index 000000000..52ce567b0 --- /dev/null +++ b/tests/functional/basic/test_basic.py @@ -0,0 +1,20 @@ +from dbt.tests.util import get_manifest, run_dbt +import pytest + + +my_model_sql = """ + select 1 as fun +""" + + +@pytest.fixture(scope="class") +def models(): + return {"my_model.sql": my_model_sql} + + +def test_basic(project): + # Tests that a project with a single model works + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + assert "model.test.my_model" in manifest.nodes diff --git a/tests/functional/basic/test_invalid_reference.py b/tests/functional/basic/test_invalid_reference.py new file mode 100644 index 000000000..229272f05 --- /dev/null +++ b/tests/functional/basic/test_invalid_reference.py @@ -0,0 +1,28 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +descendant_sql = """ +-- should be ref('model') +select * from {{ ref(model) }} +""" + + +model_sql = """ +select 1 as id +""" + + +@pytest.fixture(scope="class") +def models(): + return { + "descendant.sql": descendant_sql, + "model.sql": model_sql, + } + + +def test_undefined_value(project): + # Tests that a project with an invalid reference fails + with pytest.raises(CompilationError): + run_dbt(["compile"]) diff --git a/tests/functional/basic/test_jaffle_shop.py b/tests/functional/basic/test_jaffle_shop.py new file mode 100644 index 000000000..3f4ab0b23 --- /dev/null +++ b/tests/functional/basic/test_jaffle_shop.py @@ -0,0 +1,25 @@ +from dbt.tests.util import run_dbt, get_manifest, run_dbt_and_capture, write_file + +from tests.functional.projects import JaffleShop + + +class TestBasic(JaffleShop): + def test_basic(self, project): + # test .dbtignore works + write_file("models/ignore*.sql\nignore_folder", project.project_root, ".dbtignore") + # Create the data from seeds + results = run_dbt(["seed"]) + + # Tests that the jaffle_shop project runs + results = run_dbt(["run"]) + assert len(results) == 5 + manifest = get_manifest(project.project_root) + assert "model.jaffle_shop.orders" in manifest.nodes + + def test_execution_time_format_is_humanized(self, project): + # Create the data from seeds + run_dbt(["seed"]) + _, log_output = run_dbt_and_capture(["run"]) + + assert " in 0 hours 0 minutes and " in log_output + assert " seconds" in log_output diff --git a/tests/functional/basic/test_mixed_case_db.py b/tests/functional/basic/test_mixed_case_db.py new file mode 100644 index 000000000..e41fd39f9 --- /dev/null +++ b/tests/functional/basic/test_mixed_case_db.py @@ -0,0 +1,47 @@ +from dbt.tests.util import get_manifest, run_dbt +import pytest + + +model_sql = """ + select 1 as id +""" + + +@pytest.fixture(scope="class") +def models(): + return {"model.sql": model_sql} + + +@pytest.fixture(scope="class") +def dbt_profile_data(unique_schema): + + return { + "test": { + "outputs": { + "default": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": 5432, + "user": "root", + "pass": "password", + "dbname": "dbtMixedCase", + "schema": unique_schema, + }, + }, + "target": "default", + }, + } + + +def test_basic(project_root, project): + + assert project.database == "dbtMixedCase" + + # Tests that a project with a single model works + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project_root) + assert "model.test.model" in manifest.nodes + # Running a second time works + run_dbt(["run"]) diff --git a/tests/functional/basic/test_project.py b/tests/functional/basic/test_project.py new file mode 100644 index 000000000..562c3db26 --- /dev/null +++ b/tests/functional/basic/test_project.py @@ -0,0 +1,120 @@ +import os +from pathlib import Path + +from dbt.tests.util import run_dbt, update_config_file, write_config_file +from dbt.exceptions import ProjectContractError +import pytest +import yaml + + +simple_model_sql = """ +select true as my_column +""" + +simple_model_yml = """ +models: + - name: simple_model + description: "is sythentic data ok? my column:" + columns: + - name: my_column + description: asked and answered +""" + + +class TestSchemaYmlVersionMissing: + @pytest.fixture(scope="class") + def models(self): + return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml} + + def test_empty_version(self, project): + run_dbt(["run"], expect_pass=True) + + +class TestProjectConfigVersionMissing: + # default dbt_project.yml has config-version: 2 + @pytest.fixture(scope="class") + def project_config_remove(self): + return ["config-version"] + + def test_empty_version(self, project): + run_dbt(["run"], expect_pass=True) + + +class TestProjectYamlVersionMissing: + # default dbt_project.yml does not fill version + + def test_empty_version(self, project): + run_dbt(["run"], expect_pass=True) + + +class TestProjectYamlVersionValid: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"version": "1.0.0"} + + def test_valid_version(self, project): + run_dbt(["run"], expect_pass=True) + + +class TestProjectYamlVersionInvalid: + def test_invalid_version(self, project): + # we need to run it so the project gets set up first, otherwise we hit the semver error in setting up the test project + run_dbt() + update_config_file({"version": "invalid"}, "dbt_project.yml") + with pytest.raises(ProjectContractError) as excinfo: + run_dbt() + assert "at path ['version']: 'invalid' is not valid under any of the given schemas" in str( + excinfo.value + ) + + +class TestProjectDbtCloudConfig: + @pytest.fixture(scope="class") + def models(self): + return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml} + + def test_dbt_cloud(self, project): + run_dbt(["parse"], expect_pass=True) + conf = yaml.safe_load( + Path(os.path.join(project.project_root, "dbt_project.yml")).read_text() + ) + assert conf == { + "name": "test", + "profile": "test", + "flags": {"send_anonymous_usage_stats": False}, + } + + config = { + "name": "test", + "profile": "test", + "flags": {"send_anonymous_usage_stats": False}, + "dbt-cloud": { + "account_id": "123", + "application": "test", + "environment": "test", + "api_key": "test", + }, + } + write_config_file(config, project.project_root, "dbt_project.yml") + run_dbt(["parse"], expect_pass=True) + conf = yaml.safe_load( + Path(os.path.join(project.project_root, "dbt_project.yml")).read_text() + ) + assert conf == config + + +class TestProjectDbtCloudConfigString: + @pytest.fixture(scope="class") + def models(self): + return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml} + + def test_dbt_cloud_invalid(self, project): + run_dbt() + config = {"name": "test", "profile": "test", "dbt-cloud": "Some string"} + update_config_file(config, "dbt_project.yml") + expected_err = ( + "at path ['dbt-cloud']: 'Some string' is not valid under any of the given schemas" + ) + with pytest.raises(ProjectContractError) as excinfo: + run_dbt() + assert expected_err in str(excinfo.value) diff --git a/tests/functional/basic/test_simple_reference.py b/tests/functional/basic/test_simple_reference.py new file mode 100644 index 000000000..5de7f8df9 --- /dev/null +++ b/tests/functional/basic/test_simple_reference.py @@ -0,0 +1,271 @@ +from dbt.tests.util import check_relations_equal, copy_file, read_file, run_dbt +import pytest + + +ephemeral_copy_sql = """ +{{ + config( + materialized = "ephemeral" + ) +}} + +select * from {{ this.schema }}.users +""" + +ephemeral_summary_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select gender, count(*) as ct from {{ref('ephemeral_copy')}} +group by gender +order by gender asc +""" + +incremental_copy_sql = """ +{{ + config( + materialized = "incremental" + ) +}} + +select * from {{ this.schema }}.users + +{% if is_incremental() %} + + where id > (select max(id) from {{this}}) + +{% endif %} +""" + +incremental_summary_sql = """ +{{ + config( + materialized = "table", + ) +}} + +select gender, count(*) as ct from {{ref('incremental_copy')}} +group by gender +order by gender asc +""" + +materialized_copy_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select * from {{ this.schema }}.users +""" + +materialized_summary_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select gender, count(*) as ct from {{ref('materialized_copy')}} +group by gender +order by gender asc +""" + +view_copy_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select * from {{ this.schema }}.users +""" + +view_summary_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select gender, count(*) as ct from {{ref('view_copy')}} +group by gender +order by gender asc +""" + +view_using_ref_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select gender, count(*) as ct from {{ var('var_ref') }} +group by gender +order by gender asc +""" + +properties_yml = """ +version: 2 +seeds: + - name: summary_expected + config: + column_types: + ct: BIGINT + gender: text +""" + + +@pytest.fixture(scope="class") +def models(): + return { + "ephemeral_copy.sql": ephemeral_copy_sql, + "ephemeral_summary.sql": ephemeral_summary_sql, + "incremental_copy.sql": incremental_copy_sql, + "incremental_summary.sql": incremental_summary_sql, + "materialized_copy.sql": materialized_copy_sql, + "materialized_summary.sql": materialized_summary_sql, + "view_copy.sql": view_copy_sql, + "view_summary.sql": view_summary_sql, + "view_using_ref.sql": view_using_ref_sql, + } + + +@pytest.fixture(scope="class") +def seeds(test_data_dir): + # Read seed file and return + seeds = {"properties.yml": properties_yml} + seed_csv = read_file(test_data_dir, "seed-initial.csv") + seeds["users.csv"] = seed_csv + summary_csv = read_file(test_data_dir, "summary_expected.csv") + seeds["summary_expected.csv"] = summary_csv + return seeds + + +@pytest.fixture(scope="class") +def project_config_update(): + return { + "vars": { + "test": { + "var_ref": '{{ ref("view_copy") }}', + }, + }, + "seeds": {"quote_columns": False}, + } + + +# This test checks that with different materializations we get the right +# tables copied or built. +def test_simple_reference(project): + results = run_dbt(["seed"]) + assert len(results) == 2 + + # Now run dbt + results = run_dbt() + assert len(results) == 8 + + # Copies should match + check_relations_equal( + project.adapter, ["users", "incremental_copy", "materialized_copy", "view_copy"] + ) + + # Summaries should match + check_relations_equal( + project.adapter, + [ + "summary_expected", + "incremental_summary", + "materialized_summary", + "view_summary", + "ephemeral_summary", + "view_using_ref", + ], + ) + + # update the seed files and run seed + copy_file( + project.test_data_dir, "seed-update.csv", project.project_root, ["seeds", "users.csv"] + ) + copy_file( + project.test_data_dir, + "summary_expected_update.csv", + project.project_root, + ["seeds", "summary_expected.csv"], + ) + results = run_dbt(["seed"]) + assert len(results) == 2 + + results = run_dbt() + assert len(results) == 8 + + # Copies should match + check_relations_equal( + project.adapter, ["users", "incremental_copy", "materialized_copy", "view_copy"] + ) + + # Summaries should match + check_relations_equal( + project.adapter, + [ + "summary_expected", + "incremental_summary", + "materialized_summary", + "view_summary", + "ephemeral_summary", + "view_using_ref", + ], + ) + + +def test_simple_reference_with_models_and_children(project): + results = run_dbt(["seed"]) + assert len(results) == 2 + + # Run materialized_copy, ephemeral_copy, and their dependents + results = run_dbt(["run", "--models", "materialized_copy+", "ephemeral_copy+"]) + assert len(results) == 3 + + # Copies should match + check_relations_equal(project.adapter, ["users", "materialized_copy"]) + + # Summaries should match + check_relations_equal( + project.adapter, ["summary_expected", "materialized_summary", "ephemeral_summary"] + ) + + created_tables = project.get_tables_in_schema() + + assert "incremental_copy" not in created_tables + assert "incremental_summary" not in created_tables + assert "view_copy" not in created_tables + assert "view_summary" not in created_tables + + # make sure this wasn't errantly materialized + assert "ephemeral_copy" not in created_tables + + assert "materialized_copy" in created_tables + assert "materialized_summary" in created_tables + assert created_tables["materialized_copy"] == "table" + assert created_tables["materialized_summary"] == "table" + + assert "ephemeral_summary" in created_tables + assert created_tables["ephemeral_summary"] == "table" + + +def test_simple_ref_with_models(project): + results = run_dbt(["seed"]) + assert len(results) == 2 + + # Run materialized_copy, ephemeral_copy, and their dependents + # ephemeral_copy should not actually be materialized b/c it is ephemeral + results = run_dbt(["run", "--models", "materialized_copy", "ephemeral_copy"]) + assert len(results) == 1 + + # Copies should match + check_relations_equal(project.adapter, ["users", "materialized_copy"]) + + created_tables = project.get_tables_in_schema() + assert "materialized_copy" in created_tables diff --git a/tests/functional/basic/test_varchar_widening.py b/tests/functional/basic/test_varchar_widening.py new file mode 100644 index 000000000..11d424eb4 --- /dev/null +++ b/tests/functional/basic/test_varchar_widening.py @@ -0,0 +1,56 @@ +import os + +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + + +incremental_sql = """ +{{ + config( + materialized = "incremental" + ) +}} + +select * from {{ this.schema }}.seed + +{% if is_incremental() %} + + where id > (select max(id) from {{this}}) + +{% endif %} +""" + +materialized_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select * from {{ this.schema }}.seed +""" + + +@pytest.fixture(scope="class") +def models(): + return {"incremental.sql": incremental_sql, "materialized.sql": materialized_sql} + + +def test_varchar_widening(project): + path = os.path.join(project.test_data_dir, "varchar10_seed.sql") + project.run_sql_file(path) + + results = run_dbt(["run"]) + assert len(results) == 2 + + check_relations_equal(project.adapter, ["seed", "incremental"]) + check_relations_equal(project.adapter, ["seed", "materialized"]) + + path = os.path.join(project.test_data_dir, "varchar300_seed.sql") + project.run_sql_file(path) + + results = run_dbt(["run"]) + assert len(results) == 2 + + check_relations_equal(project.adapter, ["seed", "incremental"]) + check_relations_equal(project.adapter, ["seed", "materialized"]) diff --git a/tests/functional/build_command/fixtures.py b/tests/functional/build_command/fixtures.py new file mode 100644 index 000000000..7f6b17d48 --- /dev/null +++ b/tests/functional/build_command/fixtures.py @@ -0,0 +1,302 @@ +seeds__country_csv = """iso3,name,iso2,iso_numeric,cow_alpha,cow_numeric,fao_code,un_code,wb_code,imf_code,fips,geonames_name,geonames_id,r_name,aiddata_name,aiddata_code,oecd_name,oecd_code,historical_name,historical_iso3,historical_iso2,historical_iso_numeric +ABW,Aruba,AW,533,,,,533,ABW,314,AA,Aruba,3577279,ARUBA,Aruba,12,Aruba,373,,,, +AFG,Afghanistan,AF,4,AFG,700,2,4,AFG,512,AF,Afghanistan,1149361,AFGHANISTAN,Afghanistan,1,Afghanistan,625,,,, +AGO,Angola,AO,24,ANG,540,7,24,AGO,614,AO,Angola,3351879,ANGOLA,Angola,7,Angola,225,,,, +AIA,Anguilla,AI,660,,,,660,AIA,312,AV,Anguilla,3573511,ANGUILLA,Anguilla,8,Anguilla,376,,,, +ALA,Aland Islands,AX,248,,,,248,ALA,,,Aland Islands,661882,ALAND ISLANDS,,,,,,,, +ALB,Albania,AL,8,ALB,339,3,8,ALB,914,AL,Albania,783754,ALBANIA,Albania,3,Albania,71,,,, +AND,Andorra,AD,20,AND,232,6,20,ADO,,AN,Andorra,3041565,ANDORRA,,,,,,,, +ANT,Netherlands Antilles,AN,530,,,,,ANT,353,NT,Netherlands Antilles,,NETHERLANDS ANTILLES,Netherlands Antilles,211,Netherlands Antilles,361,Netherlands Antilles,ANT,AN,530 +ARE,United Arab Emirates,AE,784,UAE,696,225,784,ARE,466,AE,United Arab Emirates,290557,UNITED ARAB EMIRATES,United Arab Emirates,140,United Arab Emirates,576,,,, +""" + +snapshots__snap_0 = """ +{% snapshot snap_0 %} + +{{ + config( + target_database=database, + target_schema=schema, + unique_key='iso3', + + strategy='timestamp', + updated_at='snap_0_updated_at', + ) +}} + +select *, current_timestamp as snap_0_updated_at from {{ ref('model_0') }} + +{% endsnapshot %} +""" + +snapshots__snap_1 = """ +{% snapshot snap_1 %} + +{{ + config( + target_database=database, + target_schema=schema, + unique_key='iso3', + + strategy='timestamp', + updated_at='snap_1_updated_at', + ) +}} + +SELECT + iso3, + name, + iso2, + iso_numeric, + cow_alpha, + cow_numeric, + fao_code, + un_code, + wb_code, + imf_code, + fips, + geonames_name, + geonames_id, + r_name, + aiddata_name, + aiddata_code, + oecd_name, + oecd_code, + historical_name, + historical_iso3, + historical_iso2, + historical_iso_numeric, + current_timestamp as snap_1_updated_at from {{ ref('model_1') }} + +{% endsnapshot %} +""" + +snapshots__snap_99 = """ +{% snapshot snap_99 %} + +{{ + config( + target_database=database, + target_schema=schema, + strategy='timestamp', + unique_key='num', + updated_at='snap_99_updated_at', + ) +}} + +select *, current_timestamp as snap_99_updated_at from {{ ref('model_99') }} + +{% endsnapshot %} +""" + +models__model_0_sql = """ +{{ config(materialized='table') }} + +select * from {{ ref('countries') }} +""" + +models__model_1_sql = """ +{{ config(materialized='table') }} + +select * from {{ ref('snap_0') }} +""" + +models__model_2_sql = """ +{{ config(materialized='table') }} + +select * from {{ ref('snap_1') }} +""" + +models__model_3_sql = """ +{{ config(materialized='table') }} + +select * from {{ ref('model_1') }} +""" + +models__model_99_sql = """ +{{ config(materialized='table') }} + +select '1' as "num" +""" + +models__test_yml = """ +version: 2 + +models: + - name: model_0 + columns: + - name: iso3 + data_tests: + - unique + - not_null + - name: model_2 + columns: + - name: iso3 + data_tests: + - unique + - not_null +""" + +unit_tests__yml = """ +unit_tests: + - name: ut_model_3 + model: model_3 + given: + - input: ref('model_1') + rows: + - {iso3: ABW, name: Aruba} + expect: + rows: + - {iso3: ABW, name: Aruba} +""" + +models_failing_tests__tests_yml = """ +version: 2 + +models: + - name: model_0 + columns: + - name: iso3 + data_tests: + - unique + - not_null + - name: historical_iso_numeric + data_tests: + - not_null + - name: model_2 + columns: + - name: iso3 + data_tests: + - unique + - not_null +""" + +models_failing__model_1_sql = """ +{{ config(materialized='table') }} + +select bad_column from {{ ref('snap_0') }} +""" + + +models_circular_relationship__test_yml = """ +version: 2 + +models: + - name: model_0 + columns: + - name: iso3 + data_tests: + - relationships: + to: ref('model_1') + field: iso3 + + - name: model_1 + columns: + - name: iso3 + data_tests: + - relationships: + to: ref('model_0') + field: iso3 + +""" + +models_simple_blocking__model_a_sql = """ +select null as id +""" + +models_simple_blocking__model_b_sql = """ +select * from {{ ref('model_a') }} +""" + +models_simple_blocking__test_yml = """ +version: 2 + +models: + - name: model_a + columns: + - name: id + data_tests: + - not_null +""" + +models_triple_blocking__test_yml = """ +version: 2 + +models: + - name: model_a + columns: + - name: id + data_tests: + - not_null + - name: model_b + columns: + - name: id + data_tests: + - not_null + - name: model_c + columns: + - name: id + data_tests: + - not_null +""" + +models_interdependent__model_a_sql = """ +select 1 as id +""" + +models_interdependent__model_b_sql = """ +select * from {{ ref('model_a') }} +""" + +models_interdependent__model_b_null_sql = """ +select null from {{ ref('model_a') }} +""" + + +models_interdependent__model_c_sql = """ +select * from {{ ref('model_b') }} +""" + +models_interdependent__test_yml = """ +version: 2 + +models: + - name: model_a + columns: + - name: id + data_tests: + - unique + - not_null + - relationships: + to: ref('model_b') + field: id + - relationships: + to: ref('model_c') + field: id + + - name: model_b + columns: + - name: id + data_tests: + - unique + - not_null + - relationships: + to: ref('model_a') + field: id + - relationships: + to: ref('model_c') + field: id + + - name: model_c + columns: + - name: id + data_tests: + - unique + - not_null + - relationships: + to: ref('model_a') + field: id + - relationships: + to: ref('model_b') + field: id +""" diff --git a/tests/functional/build_command/test_build.py b/tests/functional/build_command/test_build.py new file mode 100644 index 000000000..7ffa0a20f --- /dev/null +++ b/tests/functional/build_command/test_build.py @@ -0,0 +1,211 @@ +from dbt.tests.util import run_dbt +import pytest + +import fixtures + + +class TestBuildBase: + @pytest.fixture(scope="class") + def seeds(self): + return {"countries.csv": fixtures.seeds__country_csv} + + @pytest.fixture(scope="class") + def snapshots(self): + return { + "snap_0.sql": fixtures.snapshots__snap_0, + "snap_1.sql": fixtures.snapshots__snap_1, + "snap_99.sql": fixtures.snapshots__snap_99, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + } + + +class TestPassingBuild(TestBuildBase): + @pytest.fixture(scope="class") + def models(self): + return { + "model_0.sql": fixtures.models__model_0_sql, + "model_1.sql": fixtures.models__model_1_sql, + "model_2.sql": fixtures.models__model_2_sql, + "model_3.sql": fixtures.models__model_3_sql, + "model_99.sql": fixtures.models__model_99_sql, + "test.yml": fixtures.models__test_yml + fixtures.unit_tests__yml, + } + + def test_build_happy_path(self, project): + run_dbt(["build"]) + + +class TestFailingBuild(TestBuildBase): + @pytest.fixture(scope="class") + def models(self): + return { + "model_0.sql": fixtures.models__model_0_sql, + "model_1.sql": fixtures.models_failing__model_1_sql, + "model_2.sql": fixtures.models__model_2_sql, + "model_3.sql": fixtures.models__model_3_sql, + "model_99.sql": fixtures.models__model_99_sql, + "test.yml": fixtures.models__test_yml + fixtures.unit_tests__yml, + } + + def test_failing_test_skips_downstream(self, project): + results = run_dbt(["build"], expect_pass=False) + assert len(results) == 14 + actual = [str(r.status) for r in results] + expected = ["error"] * 1 + ["skipped"] * 6 + ["pass"] * 2 + ["success"] * 5 + + assert sorted(actual) == sorted(expected) + + +class TestFailingTestsBuild(TestBuildBase): + @pytest.fixture(scope="class") + def models(self): + return { + "model_0.sql": fixtures.models__model_0_sql, + "model_1.sql": fixtures.models__model_1_sql, + "model_2.sql": fixtures.models__model_2_sql, + "model_99.sql": fixtures.models__model_99_sql, + "test.yml": fixtures.models_failing_tests__tests_yml, + } + + def test_failing_test_skips_downstream(self, project): + results = run_dbt(["build"], expect_pass=False) + assert len(results) == 13 + actual = [str(r.status) for r in results] + expected = ["fail"] + ["skipped"] * 6 + ["pass"] * 2 + ["success"] * 4 + assert sorted(actual) == sorted(expected) + + +class TestCircularRelationshipTestsBuild(TestBuildBase): + @pytest.fixture(scope="class") + def models(self): + return { + "model_0.sql": fixtures.models__model_0_sql, + "model_1.sql": fixtures.models__model_1_sql, + "model_99.sql": fixtures.models__model_99_sql, + "test.yml": fixtures.models_circular_relationship__test_yml, + } + + def test_circular_relationship_test_success(self, project): + """Ensure that tests that refer to each other's model don't create + a circular dependency.""" + results = run_dbt(["build"]) + actual = [str(r.status) for r in results] + expected = ["success"] * 7 + ["pass"] * 2 + + assert sorted(actual) == sorted(expected) + + +class TestSimpleBlockingTest: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": fixtures.models_simple_blocking__model_a_sql, + "model_b.sql": fixtures.models_simple_blocking__model_b_sql, + "test.yml": fixtures.models_simple_blocking__test_yml, + } + + def test_simple_blocking_test(self, project): + """Ensure that a failed test on model_a always blocks model_b""" + results = run_dbt(["build"], expect_pass=False) + actual = [r.status for r in results] + expected = ["success", "fail", "skipped"] + assert sorted(actual) == sorted(expected) + + +class TestInterdependentModels: + @pytest.fixture(scope="class") + def seeds(self): + return {"countries.csv": fixtures.seeds__country_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": fixtures.models_interdependent__model_a_sql, + "model_b.sql": fixtures.models_interdependent__model_b_sql, + "model_c.sql": fixtures.models_interdependent__model_c_sql, + "test.yml": fixtures.models_interdependent__test_yml, + } + + def test_interdependent_models(self, project): + results = run_dbt(["build"]) + assert len(results) == 16 + + +class TestInterdependentModelsFail: + @pytest.fixture(scope="class") + def seeds(self): + return {"countries.csv": fixtures.seeds__country_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": fixtures.models_interdependent__model_a_sql, + "model_b.sql": fixtures.models_interdependent__model_b_null_sql, + "model_c.sql": fixtures.models_interdependent__model_c_sql, + "test.yml": fixtures.models_interdependent__test_yml, + } + + def test_interdependent_models_fail(self, project): + results = run_dbt(["build"], expect_pass=False) + assert len(results) == 16 + + actual = [str(r.status) for r in results] + expected = ["error"] * 4 + ["skipped"] * 7 + ["pass"] * 2 + ["success"] * 3 + assert sorted(actual) == sorted(expected) + + +class TestDownstreamSelection: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": fixtures.models_simple_blocking__model_a_sql, + "model_b.sql": fixtures.models_simple_blocking__model_b_sql, + "test.yml": fixtures.models_simple_blocking__test_yml, + } + + def test_downstream_selection(self, project): + """Ensure that selecting test+ does not select model_a's other children""" + # fails with "Got 1 result, configured to fail if != 0" + # model_a is defined as select null as id + results = run_dbt(["build", "--select", "model_a not_null_model_a_id+"], expect_pass=False) + assert len(results) == 2 + + +class TestLimitedUpstreamSelection: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": fixtures.models_interdependent__model_a_sql, + "model_b.sql": fixtures.models_interdependent__model_b_sql, + "model_c.sql": fixtures.models_interdependent__model_c_sql, + "test.yml": fixtures.models_triple_blocking__test_yml, + } + + def test_limited_upstream_selection(self, project): + """Ensure that selecting 1+model_c only selects up to model_b (+ tests of both)""" + # Fails with "relation "test17005969872609282880_test_build.model_a" does not exist" + results = run_dbt(["build", "--select", "1+model_c"], expect_pass=False) + assert len(results) == 4 diff --git a/tests/functional/cli/test_cli_exit_codes.py b/tests/functional/cli/test_cli_exit_codes.py new file mode 100644 index 000000000..87daa36f2 --- /dev/null +++ b/tests/functional/cli/test_cli_exit_codes.py @@ -0,0 +1,37 @@ +from dbt.cli.exceptions import ResultExit +from dbt.cli.main import cli +import pytest + + +good_sql = """ +select 1 as fun +""" + +bad_sql = """ +someting bad +""" + + +class CliRunnerBase: + def run_cli(self): + ctx = cli.make_context(cli.name, ["run"]) + return cli.invoke(ctx) + + +class TestExitCodeZero(CliRunnerBase): + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": good_sql} + + def test_no_exc_thrown(self, project): + self.run_cli() + + +class TestExitCodeOne(CliRunnerBase): + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": bad_sql} + + def test_exc_thrown(self, project): + with pytest.raises(ResultExit): + self.run_cli() diff --git a/tests/functional/cli/test_env_var_deprecations.py b/tests/functional/cli/test_env_var_deprecations.py new file mode 100644 index 000000000..27293a36c --- /dev/null +++ b/tests/functional/cli/test_env_var_deprecations.py @@ -0,0 +1,60 @@ +import os + +from dbt.tests.util import read_file, run_dbt +import pytest + + +model_one_sql = """ + select 1 as fun +""" + + +class TestDeprecatedEnvVars: + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": model_one_sql} + + def test_defer(self, project, logs_dir): + self.assert_deprecated( + logs_dir, + "DBT_DEFER_TO_STATE", + "DBT_DEFER", + ) + + def test_favor_state(self, project, logs_dir): + self.assert_deprecated( + logs_dir, + "DBT_FAVOR_STATE_MODE", + "DBT_FAVOR_STATE", + command="build", + ) + + def test_print(self, project, logs_dir): + self.assert_deprecated( + logs_dir, + "DBT_NO_PRINT", + "DBT_PRINT", + ) + + def test_state(self, project, logs_dir): + self.assert_deprecated( + logs_dir, + "DBT_ARTIFACT_STATE_PATH", + "DBT_STATE", + old_val=".", + ) + + def assert_deprecated(self, logs_dir, old_env_var, new_env_var, command="run", old_val="0"): + os.environ[old_env_var] = old_val + run_dbt([command]) + + # replacing new lines with spaces accounts for text wrapping + log_file = read_file(logs_dir, "dbt.log").replace("\n", " ").replace("\\n", " ") + dep_str = f"The environment variable `{old_env_var}` has been renamed as `{new_env_var}`" + + try: + assert dep_str in log_file + except Exception as e: + del os.environ[old_env_var] + raise e + del os.environ[old_env_var] diff --git a/tests/functional/cli/test_error_handling.py b/tests/functional/cli/test_error_handling.py new file mode 100644 index 000000000..0fa6c2950 --- /dev/null +++ b/tests/functional/cli/test_error_handling.py @@ -0,0 +1,19 @@ +from dbt.tests.util import run_dbt +import pytest + + +model_one_sql = """ +someting bad +""" + + +class TestHandledExit: + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": model_one_sql} + + def test_failed_run_does_not_throw(self, project): + run_dbt(["run"], expect_pass=False) + + def test_fail_fast_failed_run_does_not_throw(self, project): + run_dbt(["--fail-fast", "run"], expect_pass=False) diff --git a/tests/functional/cli/test_multioption.py b/tests/functional/cli/test_multioption.py new file mode 100644 index 000000000..0de80a4f2 --- /dev/null +++ b/tests/functional/cli/test_multioption.py @@ -0,0 +1,142 @@ +from dbt.tests.util import run_dbt +import pytest + + +model_one_sql = """ +select 1 as fun +""" + +schema_sql = """ +sources: + - name: my_source + description: "My source" + schema: test_schema + tables: + - name: my_table + - name: my_other_table + +exposures: + - name: weekly_jaffle_metrics + label: By the Week + type: dashboard + maturity: high + url: https://bi.tool/dashboards/1 + description: > + Did someone say "exponential growth"? + depends_on: + - ref('model_one') + owner: + name: dbt Labs + email: data@jaffleshop.com +""" + + +class TestResourceType: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": schema_sql, "model_one.sql": model_one_sql} + + def test_resource_type_single(self, project): + result = run_dbt(["-q", "ls", "--resource-types", "model"]) + assert len(result) == 1 + assert result == ["test.model_one"] + + def test_resource_type_quoted(self, project): + result = run_dbt(["-q", "ls", "--resource-types", "model source"]) + assert len(result) == 3 + expected_result = { + "test.model_one", + "source:test.my_source.my_table", + "source:test.my_source.my_other_table", + } + assert set(result) == expected_result + + def test_resource_type_args(self, project): + result = run_dbt( + [ + "-q", + "ls", + "--resource-type", + "model", + "--resource-type", + "source", + "--resource-type", + "exposure", + ] + ) + assert len(result) == 4 + expected_result = { + "test.model_one", + "source:test.my_source.my_table", + "source:test.my_source.my_other_table", + "exposure:test.weekly_jaffle_metrics", + } + assert set(result) == expected_result + + +class TestOutputKeys: + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": model_one_sql} + + def test_output_key_single(self, project): + result = run_dbt(["-q", "ls", "--output", "json", "--output-keys", "name"]) + assert len(result) == 1 + assert result == ['{"name": "model_one"}'] + + def test_output_key_quoted(self, project): + result = run_dbt(["-q", "ls", "--output", "json", "--output-keys", "name resource_type"]) + + assert len(result) == 1 + assert result == ['{"name": "model_one", "resource_type": "model"}'] + + def test_output_key_args(self, project): + result = run_dbt( + [ + "-q", + "ls", + "--output", + "json", + "--output-keys", + "name", + "--output-keys", + "resource_type", + ] + ) + + assert len(result) == 1 + assert result == ['{"name": "model_one", "resource_type": "model"}'] + + +class TestSelectExclude: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + "model_two.sql": model_one_sql, + "model_three.sql": model_one_sql, + } + + def test_select_exclude_single(self, project): + result = run_dbt(["-q", "ls", "--select", "model_one"]) + assert len(result) == 1 + assert result == ["test.model_one"] + result = run_dbt(["-q", "ls", "--exclude", "model_one"]) + assert len(result) == 2 + assert "test.model_one" not in result + + def test_select_exclude_quoted(self, project): + result = run_dbt(["-q", "ls", "--select", "model_one model_two"]) + assert len(result) == 2 + assert "test.model_three" not in result + result = run_dbt(["-q", "ls", "--exclude", "model_one model_two"]) + assert len(result) == 1 + assert result == ["test.model_three"] + + def test_select_exclude_args(self, project): + result = run_dbt(["-q", "ls", "--select", "model_one", "--select", "model_two"]) + assert len(result) == 2 + assert "test.model_three" not in result + result = run_dbt(["-q", "ls", "--exclude", "model_one", "--exclude", "model_two"]) + assert len(result) == 1 + assert result == ["test.model_three"] diff --git a/tests/functional/cli/test_resolvers.py b/tests/functional/cli/test_resolvers.py new file mode 100644 index 000000000..e809a4e1c --- /dev/null +++ b/tests/functional/cli/test_resolvers.py @@ -0,0 +1,36 @@ +from pathlib import Path + +from dbt.cli.resolvers import default_log_path +import pytest + + +class TestDefaultLogPathNoProject: + def test_default_log_path_no_project(self): + expected_log_path = Path("logs") + actual_log_path = default_log_path("nonexistent_project_dir") + + assert actual_log_path == expected_log_path + + +class TestDefaultLogPathWithProject: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"log-path": "test_default_log_path"} + + def test_default_log_path_with_project(self, project, project_config_update): + expected_log_path = Path(project.project_root) / "test_default_log_path" + actual_log_path = default_log_path(project.project_root) + + assert actual_log_path == expected_log_path + + +class TestDefaultLogPathWithProjectNoConfiguredLogPath: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"log-path": None} + + def test_default_log_path_with_project(self, project, project_config_update): + expected_log_path = Path(project.project_root) / "logs" + actual_log_path = default_log_path(project.project_root) + + assert actual_log_path == expected_log_path diff --git a/tests/functional/compile/fixtures.py b/tests/functional/compile/fixtures.py new file mode 100644 index 000000000..e0be7c895 --- /dev/null +++ b/tests/functional/compile/fixtures.py @@ -0,0 +1,58 @@ +first_model_sql = """ +select 1 as fun +""" + +second_model_sql = """ +{%- set columns = adapter.get_columns_in_relation(ref('first_model')) -%} +select + *, + {{ this.schema }} as schema +from {{ ref('first_model') }} +""" + +first_ephemeral_model_sql = """ +{{ config(materialized = 'ephemeral') }} +select 1 as fun +""" + +second_ephemeral_model_sql = """ +{{ config(materialized = 'ephemeral') }} +select * from {{ ref('first_ephemeral_model') }} +""" + +third_ephemeral_model_sql = """ +select * from {{ ref('second_ephemeral_model')}} +union all +select 2 as fun +""" + +model_multiline_jinja = """ +select {{ + 1 + 1 +}} as fun +""" + +with_recursive_model_sql = """ +{{ config(materialized = 'ephemeral') }} +with recursive t(n) as ( + select * from {{ ref('first_ephemeral_model') }} + union all + select n+1 from t where n < 100 +) +select sum(n) from t; +""" + +schema_yml = """ +version: 2 + +models: + - name: second_model + description: "The second model" + columns: + - name: fun + data_tests: + - not_null + - name: schema + data_tests: + - unique +""" diff --git a/tests/functional/compile/test_compile.py b/tests/functional/compile/test_compile.py new file mode 100644 index 000000000..4e6b4fe1d --- /dev/null +++ b/tests/functional/compile/test_compile.py @@ -0,0 +1,218 @@ +import json +import pathlib +import re + +from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtBaseException, DbtRuntimeError +import pytest + +from tests.functional.compile import fixtures +from tests.functional.dbt_runner import dbtTestRunner + + +def norm_whitespace(string): + _RE_COMBINE_WHITESPACE = re.compile(r"\s+") + string = _RE_COMBINE_WHITESPACE.sub(" ", string).strip() + return string + + +def get_lines(model_name): + f = read_file("target", "compiled", "test", "models", model_name + ".sql") + return [line for line in f.splitlines() if line] + + +def file_exists(model_name): + from dbt.tests.util import file_exists + + return file_exists("target", "compiled", "test", "models", model_name + ".sql") + + +class TestIntrospectFlag: + @pytest.fixture(scope="class") + def models(self): + return { + "first_model.sql": fixtures.first_model_sql, + "second_model.sql": fixtures.second_model_sql, + "schema.yml": fixtures.schema_yml, + } + + def test_default(self, project): + run_dbt(["compile"]) + assert get_lines("first_model") == ["select 1 as fun"] + assert any("_test_compile as schema" in line for line in get_lines("second_model")) + + def test_no_introspect(self, project): + with pytest.raises(DbtRuntimeError, match="connection never acquired for thread"): + run_dbt(["compile", "--no-introspect"]) + + +class TestEphemeralModels: + @pytest.fixture(scope="class") + def models(self): + return { + "first_ephemeral_model.sql": fixtures.first_ephemeral_model_sql, + "second_ephemeral_model.sql": fixtures.second_ephemeral_model_sql, + "third_ephemeral_model.sql": fixtures.third_ephemeral_model_sql, + "with_recursive_model.sql": fixtures.with_recursive_model_sql, + } + + def test_first_selector(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--select", "first_ephemeral_model"] + ) + assert file_exists("first_ephemeral_model") + assert not file_exists("second_ephemeral_model") + assert not file_exists("third_ephemeral_model") + assert "Compiled node 'first_ephemeral_model' is" in log_output + + def test_middle_selector(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--select", "second_ephemeral_model"] + ) + assert file_exists("first_ephemeral_model") + assert file_exists("second_ephemeral_model") + assert not file_exists("third_ephemeral_model") + assert "Compiled node 'second_ephemeral_model' is" in log_output + + def test_last_selector(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--select", "third_ephemeral_model"] + ) + assert file_exists("first_ephemeral_model") + assert file_exists("second_ephemeral_model") + assert file_exists("third_ephemeral_model") + assert "Compiled node 'third_ephemeral_model' is" in log_output + + def test_no_selector(self, project): + run_dbt(["compile"]) + + sql = read_file("target", "compiled", "test", "models", "first_ephemeral_model.sql") + assert norm_whitespace(sql) == norm_whitespace("select 1 as fun") + sql = read_file("target", "compiled", "test", "models", "second_ephemeral_model.sql") + expected_sql = """with __dbt__cte__first_ephemeral_model as ( + select 1 as fun + ) select * from __dbt__cte__first_ephemeral_model""" + assert norm_whitespace(sql) == norm_whitespace(expected_sql) + sql = read_file("target", "compiled", "test", "models", "third_ephemeral_model.sql") + expected_sql = """with __dbt__cte__first_ephemeral_model as ( + select 1 as fun + ), __dbt__cte__second_ephemeral_model as ( + select * from __dbt__cte__first_ephemeral_model + ) select * from __dbt__cte__second_ephemeral_model + union all + select 2 as fun""" + assert norm_whitespace(sql) == norm_whitespace(expected_sql) + + def test_with_recursive_cte(self, project): + run_dbt(["compile"]) + + assert get_lines("with_recursive_model") == [ + "with recursive __dbt__cte__first_ephemeral_model as (", + "select 1 as fun", + "), t(n) as (", + " select * from __dbt__cte__first_ephemeral_model", + " union all", + " select n+1 from t where n < 100", + ")", + "select sum(n) from t;", + ] + + +class TestCompile: + @pytest.fixture(scope="class") + def models(self): + return { + "first_model.sql": fixtures.first_model_sql, + "second_model.sql": fixtures.second_model_sql, + "schema.yml": fixtures.schema_yml, + } + + def test_none(self, project): + (results, log_output) = run_dbt_and_capture(["compile"]) + assert len(results) == 4 + assert "Compiled node" not in log_output + + def test_inline_pass(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--inline", "select * from {{ ref('first_model') }}"] + ) + assert len(results) == 1 + assert "Compiled inline node is:" in log_output + + def test_select_pass(self, project): + (results, log_output) = run_dbt_and_capture(["compile", "--select", "second_model"]) + assert len(results) == 3 + assert "Compiled node 'second_model' is:" in log_output + + def test_select_pass_empty(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--indirect-selection", "empty", "--select", "second_model"] + ) + assert len(results) == 1 + assert "Compiled node 'second_model' is:" in log_output + + def test_inline_fail(self, project): + with pytest.raises(DbtBaseException, match="Error parsing inline query"): + run_dbt(["compile", "--inline", "select * from {{ ref('third_model') }}"]) + + def test_inline_fail_database_error(self, project): + with pytest.raises(DbtRuntimeError, match="Database Error"): + run_dbt(["show", "--inline", "slect asdlkjfsld;j"]) + + def test_multiline_jinja(self, project): + (results, log_output) = run_dbt_and_capture(["compile", "--inline", fixtures.model_multiline_jinja]) + assert len(results) == 1 + assert "Compiled inline node is:" in log_output + + def test_output_json_select(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--select", "second_model", "--output", "json"] + ) + assert len(results) == 3 + assert "node" in log_output + assert "compiled" in log_output + + def test_output_json_inline(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--inline", "select * from {{ ref('second_model') }}", "--output", "json"] + ) + assert len(results) == 1 + assert '"node"' not in log_output + assert '"compiled"' in log_output + + def test_compile_inline_not_add_node(self, project): + dbt = dbtTestRunner() + parse_result = dbt.invoke(["parse"]) + manifest = parse_result.result + assert len(manifest.nodes) == 4 + dbt = dbtTestRunner(manifest=manifest) + dbt.invoke( + ["compile", "--inline", "select * from {{ ref('second_model') }}"], + populate_cache=False, + ) + assert len(manifest.nodes) == 4 + + def test_compile_inline_syntax_error(self, project, mocker): + patched_fire_event = mocker.patch("dbt.task.compile.fire_event") + with pytest.raises(DbtBaseException, match="Error parsing inline query"): + run_dbt(["compile", "--inline", "select * from {{ ref(1) }}"]) + # Event for parsing error fired + patched_fire_event.assert_called_once() + + def test_compile_inline_ref_node_not_exist(self, project, mocker): + patched_fire_event = mocker.patch("dbt.task.compile.fire_event") + with pytest.raises(DbtBaseException, match="Error parsing inline query"): + run_dbt(["compile", "--inline", "select * from {{ ref('third_model') }}"]) + # Event for parsing error fired + patched_fire_event.assert_called_once() + + def test_graph_summary_output(self, project): + """Ensure that the compile command generates a file named graph_summary.json + in the target directory, that the file contains valid json, and that the + json has the high level structure it should.""" + dbtTestRunner().invoke(["compile"]) + summary_path = pathlib.Path(project.project_root, "target/graph_summary.json") + with open(summary_path, "r") as summary_file: + summary = json.load(summary_file) + assert "_invocation_id" in summary + assert "linked" in summary diff --git a/tests/functional/configs/fixtures.py b/tests/functional/configs/fixtures.py new file mode 100644 index 000000000..bb50393fc --- /dev/null +++ b/tests/functional/configs/fixtures.py @@ -0,0 +1,201 @@ +# NOTE: these fixtures also get used in `/tests/functional/saved_queries/` +import pytest + +models__schema_yml = """ +version: 2 +sources: + - name: raw + database: "{{ target.database }}" + schema: "{{ target.schema }}" + tables: + - name: 'seed' + identifier: "{{ var('seed_name', 'invalid') }}" + columns: + - name: id + data_tests: + - unique: + enabled: "{{ var('enabled_direct', None) | as_native }}" + - accepted_values: + enabled: "{{ var('enabled_direct', None) | as_native }}" + severity: "{{ var('severity_direct', None) | as_native }}" + values: [1,2] + +models: + - name: model + columns: + - name: id + data_tests: + - unique + - accepted_values: + values: [1,2,3,4] + +""" + +models__untagged_sql = """ +{{ + config(materialized='table') +}} + +select id, value from {{ source('raw', 'seed') }} + +""" + +models__tagged__model_sql = """ +{{ + config( + materialized='view', + tags=['tag_two'], + ) +}} + +{{ + config( + materialized='table', + tags=['tag_three'], + ) +}} + +select 4 as id, 2 as value + +""" + +seeds__seed_csv = """id,value +4,2 +""" + +tests__failing_sql = """ + +select 1 as fun + +""" + +tests__sleeper_agent_sql = """ +{{ config( + enabled = var('enabled_direct', False), + severity = var('severity_direct', 'WARN') +) }} + +select 1 as fun + +""" + +my_model = """ +select 1 as user +""" + +my_model_2 = """ +select * from {{ ref('my_model') }} +""" + +my_model_3 = """ +select * from {{ ref('my_model_2') }} +""" + +my_model_2_disabled = """ +{{ config(enabled=false) }} +select * from {{ ref('my_model') }} +""" + +my_model_3_disabled = """ +{{ config(enabled=false) }} +select * from {{ ref('my_model_2') }} +""" + +my_model_2_enabled = """ +{{ config(enabled=true) }} +select * from {{ ref('my_model') }} +""" + +my_model_3_enabled = """ +{{ config(enabled=true) }} +select * from {{ ref('my_model') }} +""" + +schema_all_disabled_yml = """ +version: 2 +models: + - name: my_model + - name: my_model_2 + config: + enabled: false + - name: my_model_3 + config: + enabled: false +""" + +schema_explicit_enabled_yml = """ +version: 2 +models: + - name: my_model + - name: my_model_2 + config: + enabled: true + - name: my_model_3 + config: + enabled: true +""" + +schema_partial_disabled_yml = """ +version: 2 +models: + - name: my_model + - name: my_model_2 + config: + enabled: false + - name: my_model_3 +""" + +schema_partial_enabled_yml = """ +version: 2 +models: + - name: my_model + - name: my_model_2 + config: + enabled: True + - name: my_model_3 +""" + +schema_invalid_enabled_yml = """ +version: 2 +models: + - name: my_model + config: + enabled: True and False + - name: my_model_3 +""" + +simple_snapshot = """{% snapshot mysnapshot %} + + {{ + config( + target_schema='snapshots', + strategy='timestamp', + unique_key='id', + updated_at='updated_at' + ) + }} + + select * from dummy + +{% endsnapshot %}""" + + +class BaseConfigProject: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "untagged.sql": models__untagged_sql, + "tagged": {"model.sql": models__tagged__model_sql}, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def tests(self): + return { + "failing.sql": tests__failing_sql, + "sleeper_agent.sql": tests__sleeper_agent_sql, + } diff --git a/tests/functional/configs/test_configs.py b/tests/functional/configs/test_configs.py new file mode 100644 index 000000000..18efccfc7 --- /dev/null +++ b/tests/functional/configs/test_configs.py @@ -0,0 +1,138 @@ +import os + +from dbt.exceptions import ParsingError +from dbt.tests.util import ( + check_relations_equal, + run_dbt, + update_config_file, + write_file, +) +from dbt_common.dataclass_schema import ValidationError +import pytest + +from fixtures import BaseConfigProject, simple_snapshot + + +class TestConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "tagged": { + # the model configs will override this + "materialized": "invalid", + # the model configs will append to these + "tags": ["tag_one"], + } + }, + }, + "seeds": { + "quote_columns": False, + }, + } + + def test_config_layering( + self, + project, + ): + # run seed + results = run_dbt(["seed"]) + assert len(results) == 1 + + # test the project-level tag, and both config() call tags + assert len(run_dbt(["run", "--model", "tag:tag_one"])) == 1 + assert len(run_dbt(["run", "--model", "tag:tag_two"])) == 1 + assert len(run_dbt(["run", "--model", "tag:tag_three"])) == 1 + check_relations_equal(project.adapter, ["seed", "model"]) + + # make sure we overwrote the materialization properly + tables = project.get_tables_in_schema() + assert tables["model"] == "table" + + +# In addition to testing an alternative target-paths setting, it tests that +# the attribute is jinja rendered and that the context "modules" works. +class TestTargetConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "target-path": "target_{{ modules.datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S') }}", + "seeds": { + "quote_columns": False, + }, + } + + def test_alternative_target_paths(self, project): + # chdir to a different directory to test creation of target directory under project_root + os.chdir(project.profiles_dir) + run_dbt(["seed"]) + + target_path = "" + for d in os.listdir(project.project_root): + if os.path.isdir(os.path.join(project.project_root, d)) and d.startswith("target_"): + target_path = d + assert os.path.exists(os.path.join(project.project_root, target_path, "manifest.json")) + + +class TestInvalidTestsMaterializationProj(object): + def test_tests_materialization_proj_config(self, project): + config_patch = {"data_tests": {"materialized": "table"}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + tests_dir = os.path.join(project.project_root, "tests") + write_file("select * from foo", tests_dir, "test.sql") + + with pytest.raises(ValidationError): + run_dbt() + + +class TestInvalidSeedsMaterializationProj(object): + def test_seeds_materialization_proj_config(self, project): + config_patch = {"seeds": {"materialized": "table"}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + + seeds_dir = os.path.join(project.project_root, "seeds") + write_file("id1, id2\n1, 2", seeds_dir, "seed.csv") + + with pytest.raises(ValidationError): + run_dbt() + + +class TestInvalidSeedsMaterializationSchema(object): + def test_seeds_materialization_schema_config(self, project): + seeds_dir = os.path.join(project.project_root, "seeds") + write_file( + "version: 2\nseeds:\n - name: myseed\n config:\n materialized: table", + seeds_dir, + "schema.yml", + ) + write_file("id1, id2\n1, 2", seeds_dir, "myseed.csv") + + with pytest.raises(ValidationError): + run_dbt() + + +class TestInvalidSnapshotsMaterializationProj(object): + def test_snapshots_materialization_proj_config(self, project): + config_patch = {"snapshots": {"materialized": "table"}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + + snapshots_dir = os.path.join(project.project_root, "snapshots") + write_file(simple_snapshot, snapshots_dir, "mysnapshot.sql") + + with pytest.raises(ParsingError): + run_dbt() + + +class TestInvalidSnapshotsMaterializationSchema(object): + def test_snapshots_materialization_schema_config(self, project): + snapshots_dir = os.path.join(project.project_root, "snapshots") + write_file( + "version: 2\nsnapshots:\n - name: mysnapshot\n config:\n materialized: table", + snapshots_dir, + "schema.yml", + ) + write_file(simple_snapshot, snapshots_dir, "mysnapshot.sql") + + with pytest.raises(ValidationError): + run_dbt() diff --git a/tests/functional/configs/test_configs_in_schema_files.py b/tests/functional/configs/test_configs_in_schema_files.py new file mode 100644 index 000000000..aab0a964c --- /dev/null +++ b/tests/functional/configs/test_configs_in_schema_files.py @@ -0,0 +1,257 @@ +from dbt.exceptions import ParsingError +from dbt.tests.util import ( + check_relations_equal, + get_manifest, + run_dbt, + write_file, +) +from dbt_common.exceptions import CompilationError +import pytest + + +models_alt__schema_yml = """ +version: 2 +sources: + - name: raw + database: "{{ target.database }}" + schema: "{{ target.schema }}" + tables: + - name: 'some_seed' + columns: + - name: id + +models: + - name: model + description: "This is a model description" + config: + tags: ['tag_in_schema'] + meta: + owner: 'Julie Smith' + my_attr: "{{ var('my_var') }}" + materialized: view + + columns: + - name: id + data_tests: + - not_null: + meta: + owner: 'Simple Simon' + - unique: + config: + meta: + owner: 'John Doe' +""" + +models_alt__untagged_sql = """ +{{ + config(materialized='table') +}} + +select id, value from {{ source('raw', 'some_seed') }} +""" + +models_alt__tagged__model_sql = """ +{{ + config( + materialized='view', + tags=['tag_1_in_model'], + ) +}} + +{{ + config( + materialized='table', + tags=['tag_2_in_model'], + ) +}} + +select 4 as id, 2 as value +""" + +models_no_materialized__model_sql = """ +{{ + config( + tags=['tag_1_in_model'], + ) +}} + +{{ + config( + tags=['tag_2_in_model'], + ) +}} + +select 4 as id, 2 as value +""" + +seeds_alt__some_seed_csv = """id,value +4,2 +""" + +extra_alt__untagged_yml = """ +version: 2 + +models: + - name: untagged + description: "This is a model description" + meta: + owner: 'Somebody Else' + config: + meta: + owner: 'Julie Smith' +""" + +extra_alt__untagged2_yml = """ +version: 2 + +models: + - name: untagged + description: "This is a model description" + data_tests: + - not_null: + error_if: ">2" + config: + error_if: ">2" +""" + + +class TestSchemaFileConfigs: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_alt__schema_yml, + "untagged.sql": models_alt__untagged_sql, + "tagged": {"model.sql": models_alt__tagged__model_sql}, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"some_seed.csv": seeds_alt__some_seed_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "+meta": { + "company": "NuMade", + }, + "test": { + "+meta": { + "project": "test", + }, + "tagged": { + "+meta": { + "team": "Core Team", + }, + "tags": ["tag_in_project"], + "model": { + "materialized": "table", + "+meta": { + "owner": "Julie Dent", + }, + }, + }, + }, + }, + "vars": { + "test": { + "my_var": "TESTING", + } + }, + "seeds": { + "quote_columns": False, + }, + } + + def test_config_layering( + self, + project, + ): + + # run seed + assert len(run_dbt(["seed"])) == 1 + + # test the project-level tag, and both config() call tags + assert len(run_dbt(["run", "--model", "tag:tag_in_project"])) == 1 + assert len(run_dbt(["run", "--model", "tag:tag_1_in_model"])) == 1 + assert len(run_dbt(["run", "--model", "tag:tag_2_in_model"])) == 1 + assert len(run_dbt(["run", "--model", "tag:tag_in_schema"])) == 1 + + # Verify that model nodes have expected tags and meta + manifest = get_manifest(project.project_root) + model_id = "model.test.model" + model_node = manifest.nodes[model_id] + meta_expected = { + "company": "NuMade", + "project": "test", + "team": "Core Team", + "owner": "Julie Smith", + "my_attr": "TESTING", + } + assert model_node.meta == meta_expected + assert model_node.config.meta == meta_expected + model_tags = ["tag_1_in_model", "tag_2_in_model", "tag_in_project", "tag_in_schema"] + model_node_tags = model_node.tags.copy() + model_node_tags.sort() + assert model_node_tags == model_tags + model_node_config_tags = model_node.config.tags.copy() + model_node_config_tags.sort() + assert model_node_config_tags == model_tags + model_meta = { + "company": "NuMade", + "project": "test", + "team": "Core Team", + "owner": "Julie Smith", + "my_attr": "TESTING", + } + assert model_node.config.meta == model_meta + + # make sure we overwrote the materialization properly + tables = project.get_tables_in_schema() + assert tables["model"] == "table" + check_relations_equal(project.adapter, ["some_seed", "model"]) + + # Remove materialized config from model + write_file( + models_no_materialized__model_sql, + project.project_root, + "models", + "tagged", + "model.sql", + ) + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model_node = manifest.nodes[model_id] + + assert model_node.config.materialized == "view" + model_unrendered_config = { + "materialized": "view", + "meta": {"my_attr": "TESTING", "owner": "Julie Smith"}, + "tags": ["tag_1_in_model", "tag_2_in_model"], + } + assert model_node.unrendered_config == model_unrendered_config + + # look for test meta + schema_file_id = model_node.patch_path + schema_file = manifest.files[schema_file_id] + tests = schema_file.get_tests("models", "model") + assert tests[0] in manifest.nodes + test = manifest.nodes[tests[0]] + expected_meta = {"owner": "Simple Simon"} + assert test.config.meta == expected_meta + test = manifest.nodes[tests[1]] + expected_meta = {"owner": "John Doe"} + assert test.config.meta == expected_meta + + # copy a schema file with multiple metas + # shutil.copyfile('extra-alt/untagged.yml', 'models-alt/untagged.yml') + write_file(extra_alt__untagged_yml, project.project_root, "models", "untagged.yml") + with pytest.raises(ParsingError): + run_dbt(["run"]) + + # copy a schema file with config key in top-level of test and in config dict + # shutil.copyfile('extra-alt/untagged2.yml', 'models-alt/untagged.yml') + write_file(extra_alt__untagged2_yml, project.project_root, "models", "untagged.yml") + with pytest.raises(CompilationError): + run_dbt(["run"]) diff --git a/tests/functional/configs/test_contract_configs.py b/tests/functional/configs/test_contract_configs.py new file mode 100644 index 000000000..a7f4e35c8 --- /dev/null +++ b/tests/functional/configs/test_contract_configs.py @@ -0,0 +1,532 @@ +import os + +from dbt.exceptions import ParsingError +from dbt.tests.util import ( + get_artifact, + get_manifest, + run_dbt, + run_dbt_and_capture, + write_file, +) +from dbt_common.exceptions import ValidationError +import pytest + + +my_model_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select + 'blue' as color, + 1 as id, + cast('2019-01-01' as date) as date_day +""" + +my_model_contract_sql = """ +{{ + config( + materialized = "table", + contract = {"enforced": true} + ) +}} + +select + 1 as id, + 'blue' as color, + cast('2019-01-01' as date) as date_day +""" + +my_model_contract_disabled_sql = """ +{{ + config( + materialized = "table", + contract = {"enforced": false} + ) +}} + +select + 1 as id, + 'blue' as color, + cast('2019-01-01' as date) as date_day +""" + +my_incremental_model_sql = """ +{{ + config( + materialized = "incremental" + ) +}} + +select + 1 as id, + 'blue' as color, + cast('2019-01-01' as date) as date_day +""" + +my_view_model_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select + 1 as id, + 'blue' as color, + cast('2019-01-01' as date) as date_day +""" + +my_model_python_error = """ +import holidays, s3fs + + +def model(dbt, _): + dbt.config( + materialized="table", + packages=["holidays", "s3fs"], # how to import python libraries in dbt's context + ) + df = dbt.ref("my_model") + df_describe = df.describe() # basic statistics profiling + return df_describe +""" + +model_schema_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: string + - name: date_day + data_type: date +""" + +model_schema_alias_types_false_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true + alias_types: false + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: string + - name: date_day + data_type: date +""" + +model_schema_ignore_unsupported_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + warn_unsupported: False + - type: primary_key + warn_unsupported: False + - type: check + warn_unsupported: False + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: date +""" + +model_schema_errors_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: text + - name: date_day + - name: python_model + config: + contract: + enforced: true + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: date +""" + +model_schema_blank_yml = """ +version: 2 +models: + - name: my_model + config: + contract: + enforced: true +""" + +model_schema_complete_datatypes_yml = """ +version: 2 +models: + - name: my_model + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: text + - name: date_day + data_type: date +""" + +model_schema_incomplete_datatypes_yml = """ +version: 2 +models: + - name: my_model + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + - name: date_day + data_type: date +""" + + +class TestModelLevelContractEnabledConfigs: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "constraints_schema.yml": model_schema_yml, + } + + def test__model_contract_true(self, project): + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + model = manifest.nodes[model_id] + my_model_columns = model.columns + my_model_config = model.config + contract_actual_config = my_model_config.contract + + assert contract_actual_config.enforced is True + + expected_columns = "{'id': ColumnInfo(name='id', description='hello', meta={}, data_type='integer', constraints=[ColumnLevelConstraint(type=<ConstraintType.not_null: 'not_null'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True), ColumnLevelConstraint(type=<ConstraintType.primary_key: 'primary_key'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True), ColumnLevelConstraint(type=<ConstraintType.check: 'check'>, name=None, expression='(id > 0)', warn_unenforced=True, warn_unsupported=True)], quote=True, tags=[], _extra={}), 'color': ColumnInfo(name='color', description='', meta={}, data_type='string', constraints=[], quote=None, tags=[], _extra={}), 'date_day': ColumnInfo(name='date_day', description='', meta={}, data_type='date', constraints=[], quote=None, tags=[], _extra={})}" + + assert expected_columns == str(my_model_columns) + + # compiled fields aren't in the manifest above because it only has parsed fields + manifest_json = get_artifact(project.project_root, "target", "manifest.json") + compiled_code = manifest_json["nodes"][model_id]["compiled_code"] + cleaned_code = " ".join(compiled_code.split()) + assert ( + "select 'blue' as color, 1 as id, cast('2019-01-01' as date) as date_day" + == cleaned_code + ) + + # set alias_types to false (should fail to compile) + write_file( + model_schema_alias_types_false_yml, + project.project_root, + "models", + "constraints_schema.yml", + ) + run_dbt(["run"], expect_pass=False) + + +class TestProjectContractEnabledConfigs: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"test": {"+contract": {"enforced": True}}}} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "constraints_schema.yml": model_schema_complete_datatypes_yml, + } + + def test_defined_column_type(self, project): + run_dbt(["run"], expect_pass=True) + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + my_model_config = manifest.nodes[model_id].config + contract_actual_config = my_model_config.contract + assert contract_actual_config.enforced is True + + +class TestProjectContractEnabledConfigsError: + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "+contract": { + "enforced": True, + }, + } + } + } + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "constraints_schema.yml": model_schema_incomplete_datatypes_yml, + } + + def test_undefined_column_type(self, project): + _, log_output = run_dbt_and_capture(["run", "-s", "my_model"], expect_pass=False) + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + my_model_config = manifest.nodes[model_id].config + contract_actual_config = my_model_config.contract + + assert contract_actual_config.enforced is True + + expected_compile_error = "Please ensure that the column name and data_type are defined within the YAML configuration for the ['color'] column(s)." + + assert expected_compile_error in log_output + + +class TestModelContractEnabledConfigs: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": my_model_contract_sql, "constraints_schema.yml": model_schema_yml} + + def test__model_contract(self, project): + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + my_model_config = manifest.nodes[model_id].config + contract_actual_config = my_model_config.contract + assert contract_actual_config.enforced is True + + +class TestModelContractEnabledConfigsMissingDataTypes: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_contract_sql, + "constraints_schema.yml": model_schema_incomplete_datatypes_yml, + } + + def test_undefined_column_type(self, project): + _, log_output = run_dbt_and_capture(["run", "-s", "my_model"], expect_pass=False) + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + my_model_config = manifest.nodes[model_id].config + contract_actual_config = my_model_config.contract + + assert contract_actual_config.enforced is True + + expected_compile_error = "Please ensure that the column name and data_type are defined within the YAML configuration for the ['color'] column(s)." + + assert expected_compile_error in log_output + + +class TestModelLevelContractDisabledConfigs: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_contract_disabled_sql, + "constraints_schema.yml": model_schema_yml, + } + + def test__model_contract_false(self, project): + + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + my_model_config = manifest.nodes[model_id].config + contract_actual_config = my_model_config.contract + + assert contract_actual_config.enforced is False + + +class TestModelLevelContractErrorMessages: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_incremental_model_sql, + "constraints_schema.yml": model_schema_yml, + } + + def test__config_errors(self, project): + with pytest.raises(ValidationError) as err_info: + run_dbt(["run"], expect_pass=False) + + exc_str = " ".join(str(err_info.value).split()) + expected_materialization_error = "Invalid value for on_schema_change: ignore. Models materialized as incremental with contracts enabled must set on_schema_change to 'append_new_columns' or 'fail'" + assert expected_materialization_error in str(exc_str) + + +class TestModelLevelConstraintsErrorMessages: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.py": my_model_python_error, + "constraints_schema.yml": model_schema_yml, + } + + def test__config_errors(self, project): + with pytest.raises(ParsingError) as err_info: + run_dbt(["run"], expect_pass=False) + + exc_str = " ".join(str(err_info.value).split()) + expected_materialization_error = "Language Error: Expected 'sql' but found 'python'" + assert expected_materialization_error in str(exc_str) + # This is a compile time error and we won't get here because the materialization check is parse time + expected_empty_data_type_error = "Columns with `data_type` Blank/Null not allowed on contracted models. Columns Blank/Null: ['date_day']" + assert expected_empty_data_type_error not in str(exc_str) + + +class TestModelLevelConstraintsWarningMessages: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_view_model_sql, + "constraints_schema.yml": model_schema_yml, + } + + def test__config_warning(self, project): + _, log_output = run_dbt_and_capture(["run"]) + + expected_materialization_warning = ( + "Constraint types are not supported for view materializations" + ) + assert expected_materialization_warning in str(log_output) + + # change to not show warnings, message should not be in logs + models_dir = os.path.join(project.project_root, "models") + write_file(model_schema_ignore_unsupported_yml, models_dir, "constraints_schema.yml") + _, log_output = run_dbt_and_capture(["run"]) + + expected_materialization_warning = ( + "Constraint types are not supported for view materializations" + ) + assert expected_materialization_warning not in str(log_output) + + +class TestSchemaContractEnabledConfigs: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "constraints_schema.yml": model_schema_blank_yml, + } + + def test__schema_error(self, project): + with pytest.raises(ParsingError) as err_info: + run_dbt(["parse"], expect_pass=False) + + exc_str = " ".join(str(err_info.value).split()) + schema_error_expected = "Constraints must be defined in a `yml` schema configuration file" + assert schema_error_expected in str(exc_str) + + +class TestPythonModelLevelContractErrorMessages: + @pytest.fixture(scope="class") + def models(self): + return { + "python_model.py": my_model_python_error, + "constraints_schema.yml": model_schema_errors_yml, + } + + def test__python_errors(self, project): + with pytest.raises(ParsingError) as err_info: + run_dbt(["parse"], expect_pass=False) + + exc_str = " ".join(str(err_info.value).split()) + expected_python_error = "Language Error: Expected 'sql' but found 'python'" + assert expected_python_error in exc_str + + +class TestModelContractMissingYAMLColumns: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_contract_sql, + } + + def test__missing_column_contract_error(self, project): + results = run_dbt(["run"], expect_pass=False) + expected_error = ( + "This model has an enforced contract, and its 'columns' specification is missing" + ) + assert expected_error in results[0].message diff --git a/tests/functional/configs/test_custom_node_colors_configs.py b/tests/functional/configs/test_custom_node_colors_configs.py new file mode 100644 index 000000000..4993a9ae1 --- /dev/null +++ b/tests/functional/configs/test_custom_node_colors_configs.py @@ -0,0 +1,345 @@ +from dbt.tests.util import get_manifest, run_dbt +from dbt_common.dataclass_schema import ValidationError +import pytest + + +CUSTOM_NODE_COLOR_MODEL_LEVEL = "red" +CUSTOM_NODE_COLOR_SCHEMA_LEVEL = "blue" +CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT = "#121212" +CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER = "purple" +CUSTOM_NODE_COLOR_INVALID_HEX = '"#xxx111"' +CUSTOM_NODE_COLOR_INVALID_NAME = "notacolor" + +# F strings are a pain here so replacing XXX with the config above instead +models__custom_node_color__model_sql = """ +{{ config(materialized='view', docs={'node_color': 'XXX'}) }} + +select 1 as id + +""".replace( + "XXX", CUSTOM_NODE_COLOR_MODEL_LEVEL +) + +models__non_custom_node_color__model_sql = """ +{{ config(materialized='view') }} + +select 1 as id + +""" + +models__show_docs_false__model_sql = """ +{{ config(materialized='view', docs={"show": True}) }} + +select 1 as id +""" + +models__custom_node_color__schema_yml = """ +version: 2 + +models: + - name: custom_color_model + description: "This is a model description" + config: + docs: + node_color: {} +""".format( + CUSTOM_NODE_COLOR_SCHEMA_LEVEL +) + + +models__non_custom_node_color__schema_yml = """ +version: 2 + +models: + - name: non_custom_color_model + description: "This is a model description" + config: + docs: + node_color: {} + show: True +""".format( + CUSTOM_NODE_COLOR_SCHEMA_LEVEL +) + +# To check that incorect configs are raising errors +models__non_custom_node_color_invalid_config_docs__schema_yml = """ +version: 2 + +models: + - name: non_custom_node_color + description: "This is a model description" + config: + docs: + node_color: {} + show: True +""".format( + CUSTOM_NODE_COLOR_INVALID_HEX +) + +models__non_custom_node_color_invalid_docs__schema_yml = """ +version: 2 + +models: + - name: non_custom_node_color + description: "This is a model description" + docs: + node_color: {} + show: True +""".format( + CUSTOM_NODE_COLOR_INVALID_NAME +) + +models__custom_node_color_invalid_hex__model_sql = """ +{{ config(materialized='view', docs={"show": True, "node_color": XXX }) }} + +select 1 as id +""".replace( + "XXX", CUSTOM_NODE_COLOR_INVALID_HEX +) + + +class BaseCustomNodeColorModelvsProject: + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "+docs": {"node_color": CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT, "show": False}, + "subdirectory": { + "+docs": { + "node_color": CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER, + "show": True, + }, + }, + } + } + } + + +# validation that model level node_color configs supercede dbt_project.yml +class TestModelLevelProjectColorConfigs(BaseCustomNodeColorModelvsProject): + @pytest.fixture(scope="class") + def models(self): + return {"custom_color_model.sql": models__custom_node_color__model_sql} + + def test__model_override_project(self, project): + + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.custom_color_model" + my_model_config = manifest.nodes[model_id].config + my_model_docs = manifest.nodes[model_id].docs + + node_color_actual_config = my_model_config["docs"].node_color + show_actual_config = my_model_config["docs"].show + node_color_actual_docs = my_model_docs.node_color + show_actual_docs = my_model_docs.show + + # check node_color config is in the right spots for each model + assert node_color_actual_config == CUSTOM_NODE_COLOR_MODEL_LEVEL + assert node_color_actual_docs == CUSTOM_NODE_COLOR_MODEL_LEVEL + assert not show_actual_config + assert not show_actual_docs + + +# validation that model level node_color configs supercede schema.yml +class TestModelLevelSchemaColorConfigs(BaseCustomNodeColorModelvsProject): + @pytest.fixture(scope="class") + def models(self): + return { + "custom_color_model.sql": models__custom_node_color__model_sql, + "custom_color_schema.yml": models__custom_node_color__schema_yml, + } + + def test__model_override_schema(self, project): + + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.custom_color_model" + my_model_config = manifest.nodes[model_id].config + my_model_docs = manifest.nodes[model_id].docs + + node_color_actual_config = my_model_config["docs"].node_color + show_actual_config = my_model_config["docs"].show + node_color_actual_docs = my_model_docs.node_color + show_actual_docs = my_model_docs.show + + # check node_color config is in the right spots for each model + assert node_color_actual_config == CUSTOM_NODE_COLOR_MODEL_LEVEL + assert node_color_actual_docs == CUSTOM_NODE_COLOR_MODEL_LEVEL + assert not show_actual_config + assert not show_actual_docs + + +# validation that node_color configured on subdirectories in dbt_project.yml supercedes project root +class TestSubdirectoryColorConfigs(BaseCustomNodeColorModelvsProject): + @pytest.fixture(scope="class") + def models(self): + return { + "subdirectory": { + "non_custom_color_model_subdirectory.sql": models__non_custom_node_color__model_sql + } + } + + def test__project_folder_override_project_root(self, project): + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.non_custom_color_model_subdirectory" + my_model_config = manifest.nodes[model_id].config + my_model_docs = manifest.nodes[model_id].docs + + node_color_actual_config = my_model_config["docs"].node_color + show_actual_config = my_model_config["docs"].show + node_color_actual_docs = my_model_docs.node_color + show_actual_docs = my_model_docs.show + + # check node_color config is in the right spots for each model + assert node_color_actual_config == CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER + assert node_color_actual_docs == CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER + # in this case show should be True since the dbt_project.yml overrides the root setting for /subdirectory + assert show_actual_config + assert show_actual_docs + + +# validation that node_color configured in schema.yml supercedes dbt_project.yml +class TestSchemaOverProjectColorConfigs(BaseCustomNodeColorModelvsProject): + @pytest.fixture(scope="class") + def models(self): + return { + "non_custom_color_model.sql": models__non_custom_node_color__model_sql, + "non_custom_color_schema.yml": models__non_custom_node_color__schema_yml, + } + + def test__schema_override_project( + self, + project, + ): + + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + + model_id = "model.test.non_custom_color_model" + my_model_config = manifest.nodes[model_id].config + my_model_docs = manifest.nodes[model_id].docs + + node_color_actual_config = my_model_config["docs"].node_color + show_actual_config = my_model_config["docs"].show + node_color_actual_docs = my_model_docs.node_color + show_actual_docs = my_model_docs.show + + # check node_color config is in the right spots for each model + assert node_color_actual_config == CUSTOM_NODE_COLOR_SCHEMA_LEVEL + assert node_color_actual_docs == CUSTOM_NODE_COLOR_SCHEMA_LEVEL + # in this case show should be True since the schema.yml overrides the dbt_project.yml + assert show_actual_config + assert show_actual_docs + + +# validation that docs: show configured in model file supercedes dbt_project.yml +class TestModelOverProjectColorConfigs(BaseCustomNodeColorModelvsProject): + @pytest.fixture(scope="class") + def models(self): + return {"show_docs_override_model.sql": models__show_docs_false__model_sql} + + def test__model_show_overrides_dbt_project( + self, + project, + ): + + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + + model_id = "model.test.show_docs_override_model" + my_model_config = manifest.nodes[model_id].config + my_model_docs = manifest.nodes[model_id].docs + + node_color_actual_config = my_model_config["docs"].node_color + show_actual_config = my_model_config["docs"].show + node_color_actual_docs = my_model_docs.node_color + show_actual_docs = my_model_docs.show + + # check node_color config is in the right spots for each model + assert node_color_actual_config == CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT + assert node_color_actual_docs == CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT + # in this case show should be True since the schema.yml overrides the dbt_project.yml + assert show_actual_config + assert show_actual_docs + + +# validation that an incorrect color in dbt_project.yml raises an exception +class TestCustomNodeColorIncorrectColorProject: + @pytest.fixture(scope="class") + def models(self): # noqa: F811 + return {"non_custom_node_color.sql": models__non_custom_node_color__model_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": {"+docs": {"node_color": CUSTOM_NODE_COLOR_INVALID_NAME, "show": False}} + } + } + + def test__invalid_color_project( + self, + project, + ): + with pytest.raises(ValidationError): + run_dbt(["compile"]) + + +# validation that an incorrect color in the config block raises an exception +class TestCustomNodeColorIncorrectColorModelConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "custom_node_color_invalid_hex.sql": models__custom_node_color_invalid_hex__model_sql + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"+docs": {"node_color": "blue", "show": False}}} + + def test__invalid_color_config_block( + self, + project, + ): + with pytest.raises(ValidationError): + run_dbt(["compile"]) + + +# validation that an incorrect color in the YML file raises an exception +class TestCustomNodeColorIncorrectColorNameYMLConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "non_custom_node_color.sql": models__non_custom_node_color__model_sql, + "invalid_custom_color.yml": models__non_custom_node_color_invalid_docs__schema_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"+docs": {"node_color": "blue", "show": False}}} + + def test__invalid_color_docs_not_under_config( + self, + project, + ): + with pytest.raises(ValidationError): + run_dbt(["compile"]) + + +class TestCustomNodeColorIncorrectColorHEXYMLConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "non_custom_node_color.sql": models__non_custom_node_color__model_sql, + "invalid_custom_color.yml": models__non_custom_node_color_invalid_config_docs__schema_yml, + } + + def test__invalid_color_docs_under_config( + self, + project, + ): + with pytest.raises(ValidationError): + run_dbt(["compile"]) diff --git a/tests/functional/configs/test_disabled_configs.py b/tests/functional/configs/test_disabled_configs.py new file mode 100644 index 000000000..a8af7d461 --- /dev/null +++ b/tests/functional/configs/test_disabled_configs.py @@ -0,0 +1,90 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.configs.fixtures import BaseConfigProject + + +class TestDisabledConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def dbt_profile_data(self, unique_schema): + return { + "test": { + "outputs": { + "default": { + "type": "postgres", + # make sure you can do this and get an int out + "threads": "{{ (1 + 3) | as_number }}", + "host": "localhost", + "port": "{{ (5400 + 32) | as_number }}", + "user": "root", + "pass": "password", + "dbname": "dbt", + "schema": unique_schema, + }, + "disabled": { + "type": "postgres", + # make sure you can do this and get an int out + "threads": "{{ (1 + 3) | as_number }}", + "host": "localhost", + "port": "{{ (5400 + 32) | as_number }}", + "user": "root", + "pass": "password", + "dbname": "dbt", + "schema": unique_schema, + }, + }, + "target": "default", + }, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "enabled": "{{ (target.name == 'default' | as_bool) }}", + }, + }, + # set the `var` result in schema.yml to be 'seed', so that the + # `source` call can suceed. + "vars": { + "test": { + "seed_name": "seed", + } + }, + "seeds": { + "quote_columns": False, + "test": { + "seed": { + "enabled": "{{ (target.name == 'default') | as_bool }}", + }, + }, + }, + "data_tests": { + "test": { + "enabled": "{{ (target.name == 'default') | as_bool }}", + "severity": "WARN", + }, + }, + } + + def test_disable_seed_partial_parse(self, project): + run_dbt(["--partial-parse", "seed", "--target", "disabled"]) + run_dbt(["--partial-parse", "seed", "--target", "disabled"]) + + def test_conditional_model(self, project): + # no seeds/models - enabled should eval to False because of the target + results = run_dbt(["seed", "--target", "disabled"]) + assert len(results) == 0 + results = run_dbt(["run", "--target", "disabled"]) + assert len(results) == 0 + results = run_dbt(["test", "--target", "disabled"]) + assert len(results) == 0 + + # has seeds/models - enabled should eval to True because of the target + results = run_dbt(["seed"]) + assert len(results) == 1 + results = run_dbt(["run"]) + assert len(results) == 2 + results = run_dbt(["test"]) + assert len(results) == 5 diff --git a/tests/functional/configs/test_disabled_model.py b/tests/functional/configs/test_disabled_model.py new file mode 100644 index 000000000..d724cd956 --- /dev/null +++ b/tests/functional/configs/test_disabled_model.py @@ -0,0 +1,390 @@ +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import CompilationError +import pytest + +import fixtures + + +# ensure double disabled doesn't throw error when set at schema level +class TestSchemaDisabledConfigs: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_all_disabled_yml, + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + } + + def test_disabled_config(self, project): + run_dbt(["parse"]) + + +# ensure this throws a specific error that the model is disabled +class TestSchemaDisabledConfigsFailure: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_partial_disabled_yml, + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + } + + def test_disabled_config(self, project): + with pytest.raises(CompilationError) as exc: + run_dbt(["parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "which is disabled" + assert expected_msg in exc_str + + +# ensure double disabled doesn't throw error when set in model configs +class TestModelDisabledConfigs: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2_disabled, + "my_model_3.sql": fixtures.my_model_3_disabled, + } + + def test_disabled_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" not in manifest.nodes + assert "model.test.my_model_3" not in manifest.nodes + + assert "model.test.my_model_2" in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + +# ensure config set in project.yml can be overridden in yaml file +class TestOverrideProjectConfigsInYaml: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_partial_enabled_yml, + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "my_model_2": { + "enabled": False, + }, + "my_model_3": { + "enabled": False, + }, + }, + } + } + + def test_override_project_yaml_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" not in manifest.nodes + + assert "model.test.my_model_2" not in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + +# ensure config set in project.yml can be overridden in sql file +class TestOverrideProjectConfigsInSQL: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2_enabled, + "my_model_3.sql": fixtures.my_model_3, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "my_model_2": { + "enabled": False, + }, + "my_model_3": { + "enabled": False, + }, + }, + } + } + + def test_override_project_sql_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" not in manifest.nodes + + assert "model.test.my_model_2" not in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + +# ensure false config set in yaml file can be overridden in sql file +class TestOverrideFalseYAMLConfigsInSQL: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_all_disabled_yml, + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2_enabled, + "my_model_3.sql": fixtures.my_model_3, + } + + def test_override_yaml_sql_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" not in manifest.nodes + + assert "model.test.my_model_2" not in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + +# ensure true config set in yaml file can be overridden by false in sql file +class TestOverrideTrueYAMLConfigsInSQL: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_explicit_enabled_yml, + "my_model.sql": fixtures.my_model, + "my_model_2.sql": fixtures.my_model_2_enabled, + "my_model_3.sql": fixtures.my_model_3_disabled, + } + + def test_override_yaml_sql_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" not in manifest.nodes + + assert "model.test.my_model_2" not in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + +# ensure error when enabling in schema file when multiple nodes exist within disabled +class TestMultipleDisabledNodesForUniqueIDFailure: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_partial_enabled_yml, + "my_model.sql": fixtures.my_model, + "folder_1": { + "my_model_2.sql": fixtures.my_model_2_disabled, + "my_model_3.sql": fixtures.my_model_3_disabled, + }, + "folder_2": { + "my_model_2.sql": fixtures.my_model_2_disabled, + "my_model_3.sql": fixtures.my_model_3_disabled, + }, + "folder_3": { + "my_model_2.sql": fixtures.my_model_2_disabled, + "my_model_3.sql": fixtures.my_model_3_disabled, + }, + } + + def test_disabled_config(self, project): + with pytest.raises(ParsingError) as exc: + run_dbt(["parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "Found 3 matching disabled nodes for model 'my_model_2'" + assert expected_msg in exc_str + + +# ensure error when enabling in schema file when multiple nodes exist within disabled +class TestMultipleDisabledNodesSuccess: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": fixtures.my_model, + "folder_1": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + }, + "folder_2": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + }, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "folder_1": { + "enabled": False, + }, + "folder_2": { + "enabled": True, + }, + }, + } + } + + def test_multiple_disabled_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" in manifest.nodes + + expected_file_path = "folder_2" + assert expected_file_path in manifest.nodes["model.test.my_model_2"].original_file_path + assert expected_file_path in manifest.nodes["model.test.my_model_3"].original_file_path + + assert "model.test.my_model_2" in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + expected_disabled_file_path = "folder_1" + assert ( + expected_disabled_file_path + in manifest.disabled["model.test.my_model_2"][0].original_file_path + ) + assert ( + expected_disabled_file_path + in manifest.disabled["model.test.my_model_3"][0].original_file_path + ) + + +# ensure overrides work when enabling in sql file when multiple nodes exist within disabled +class TestMultipleDisabledNodesOverrideModel: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": fixtures.my_model, + "folder_1": { + "my_model_2.sql": fixtures.my_model_2_enabled, + "my_model_3.sql": fixtures.my_model_3, + }, + "folder_2": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3_enabled, + }, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "folder_1": { + "enabled": False, + }, + "folder_2": { + "enabled": False, + }, + }, + } + } + + def test_multiple_disabled_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" in manifest.nodes + + expected_file_path_2 = "folder_1" + assert expected_file_path_2 in manifest.nodes["model.test.my_model_2"].original_file_path + expected_file_path_3 = "folder_2" + assert expected_file_path_3 in manifest.nodes["model.test.my_model_3"].original_file_path + + assert "model.test.my_model_2" in manifest.disabled + assert "model.test.my_model_3" in manifest.disabled + + expected_disabled_file_path_2 = "folder_2" + assert ( + expected_disabled_file_path_2 + in manifest.disabled["model.test.my_model_2"][0].original_file_path + ) + expected_disabled_file_path_3 = "folder_1" + assert ( + expected_disabled_file_path_3 + in manifest.disabled["model.test.my_model_3"][0].original_file_path + ) + + +# ensure everything lands where it should when disabling multiple nodes with the same unique id +class TestManyDisabledNodesSuccess: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": fixtures.my_model, + "folder_1": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + }, + "folder_2": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + }, + "folder_3": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + }, + "folder_4": { + "my_model_2.sql": fixtures.my_model_2, + "my_model_3.sql": fixtures.my_model_3, + }, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "folder_1": { + "enabled": False, + }, + "folder_2": { + "enabled": True, + }, + "folder_3": { + "enabled": False, + }, + "folder_4": { + "enabled": False, + }, + }, + } + } + + def test_many_disabled_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "model.test.my_model_2" in manifest.nodes + assert "model.test.my_model_3" in manifest.nodes + + expected_file_path = "folder_2" + assert expected_file_path in manifest.nodes["model.test.my_model_2"].original_file_path + assert expected_file_path in manifest.nodes["model.test.my_model_3"].original_file_path + + assert len(manifest.disabled["model.test.my_model_2"]) == 3 + assert len(manifest.disabled["model.test.my_model_3"]) == 3 + + +class TestInvalidEnabledConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": fixtures.schema_invalid_enabled_yml, + "my_model.sql": fixtures.my_model, + } + + def test_invalis_config(self, project): + with pytest.raises(ValidationError) as exc: + run_dbt(["parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "'True and False' is not of type 'boolean'" + assert expected_msg in exc_str diff --git a/tests/functional/configs/test_dupe_paths.py b/tests/functional/configs/test_dupe_paths.py new file mode 100644 index 000000000..b9a98d21c --- /dev/null +++ b/tests/functional/configs/test_dupe_paths.py @@ -0,0 +1,74 @@ +from dbt.tests.util import run_dbt +import pytest + + +my_model_sql = """ +select 1 as fun +""" + +seed_csv = """id,value +4,2 +""" + +somedoc_md = """ +{% docs somedoc %} +Testing, testing +{% enddocs %} +""" + +schema_yml = """ +version: 2 +models: + - name: my_model + description: testing model +""" + + +# Either a docs or a yml file is necessary to see the problem +# when two of the paths in 'all_source_paths' are the same +class TestDupeProjectPaths: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "seed.csv": seed_csv, + "somedoc.md": somedoc_md, + "schema.yml": schema_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "model-paths": ["models"], + "seed-paths": ["models"], + } + + def test_config_with_dupe_paths(self, project, dbt_project_yml): + results = run_dbt(["seed"]) + assert len(results) == 1 + results = run_dbt(["run"]) + assert len(results) == 1 + + +class TestDupeStrippedProjectPaths: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "seed.csv": seed_csv, + "somedoc.md": somedoc_md, + "schema.yml": schema_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "model-paths": ["models/"], + "seed-paths": ["models"], + } + + def test_config_with_dupe_paths(self, project, dbt_project_yml): + results = run_dbt(["seed"]) + assert len(results) == 1 + results = run_dbt(["run"]) + assert len(results) == 1 diff --git a/tests/functional/configs/test_get_default.py b/tests/functional/configs/test_get_default.py new file mode 100644 index 000000000..36d420e08 --- /dev/null +++ b/tests/functional/configs/test_get_default.py @@ -0,0 +1,26 @@ +from dbt.tests.util import run_dbt +import pytest + + +models_get__any_model_sql = """ +-- models/any_model.sql +select {{ config.get('made_up_nonexistent_key', 'default_value') }} as col_value + +""" + + +class TestConfigGetDefault: + @pytest.fixture(scope="class") + def models(self): + return {"any_model.sql": models_get__any_model_sql} + + def test_config_with_get_default( + self, + project, + ): + # This test runs a model with a config.get(key, default) + # The default value is 'default_value' and causes an error + results = run_dbt(["run"], expect_pass=False) + assert len(results) == 1 + assert str(results[0].status) == "error" + assert 'column "default_value" does not exist' in results[0].message diff --git a/tests/functional/configs/test_grant_configs.py b/tests/functional/configs/test_grant_configs.py new file mode 100644 index 000000000..23b884a16 --- /dev/null +++ b/tests/functional/configs/test_grant_configs.py @@ -0,0 +1,155 @@ +from dbt.tests.util import ( + get_manifest, + run_dbt, + write_config_file, + write_file, +) +import pytest + + +dbt_project_yml = """ +models: + test: + my_model: + +grants: + my_select: ["reporter", "bi"] +""" + +append_schema_yml = """ +version: 2 +models: + - name: my_model + config: + grants: + +my_select: ["someone"] +""" + + +my_model_base_sql = """ +select 1 as fun +""" + + +my_model_clobber_sql = """ +{{ config(grants={'my_select': ['other_user']}) }} +select 1 as fun +""" + +my_model_extend_sql = """ +{{ config(grants={'+my_select': ['other_user']}) }} +select 1 as fun +""" + +my_model_extend_string_sql = """ +{{ config(grants={'+my_select': 'other_user'}) }} +select 1 as fun +""" + +my_model_extend_twice_sql = """ +{{ config(grants={'+my_select': ['other_user']}) }} +{{ config(grants={'+my_select': ['alt_user']}) }} +select 1 as fun +""" + + +class TestGrantConfigs: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": my_model_base_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return dbt_project_yml + + def test_model_grant_config(self, project, logs_dir): + # This test uses "my_select" instead of "select", so we need + # use "parse" instead of "run" because we will get compilation + # errors for the grants. + run_dbt(["parse"]) + + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + assert model_id in manifest.nodes + + model = manifest.nodes[model_id] + model_config = model.config + assert hasattr(model_config, "grants") + + # no schema grant, no model grant, just project + expected = {"my_select": ["reporter", "bi"]} + assert model_config.grants == expected + + # add model grant with clobber + write_file(my_model_clobber_sql, project.project_root, "models", "my_model.sql") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["other_user"]} + assert model_config.grants == expected + + # change model to extend grants + write_file(my_model_extend_sql, project.project_root, "models", "my_model.sql") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["reporter", "bi", "other_user"]} + assert model_config.grants == expected + + # add schema file with extend + write_file(append_schema_yml, project.project_root, "models", "schema.yml") + run_dbt(["parse"]) + + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["reporter", "bi", "someone", "other_user"]} + assert model_config.grants == expected + + # change model file to have string instead of list + write_file(my_model_extend_string_sql, project.project_root, "models", "my_model.sql") + run_dbt(["parse"]) + + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["reporter", "bi", "someone", "other_user"]} + assert model_config.grants == expected + + # change model file to have string instead of list + write_file(my_model_extend_twice_sql, project.project_root, "models", "my_model.sql") + run_dbt(["parse"]) + + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["reporter", "bi", "someone", "other_user", "alt_user"]} + assert model_config.grants == expected + + # Remove grant from dbt_project + config = { + "config-version": 2, + "name": "test", + "version": "0.1.0", + "profile": "test", + "log-path": logs_dir, + } + write_config_file(config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["someone", "other_user", "alt_user"]} + assert model_config.grants == expected + + # Remove my_model config, leaving only schema file + write_file(my_model_base_sql, project.project_root, "models", "my_model.sql") + run_dbt(["parse"]) + + manifest = get_manifest(project.project_root) + model_config = manifest.nodes[model_id].config + + expected = {"my_select": ["someone"]} + assert model_config.grants == expected diff --git a/tests/functional/configs/test_indiv_tests.py b/tests/functional/configs/test_indiv_tests.py new file mode 100644 index 000000000..1084760a2 --- /dev/null +++ b/tests/functional/configs/test_indiv_tests.py @@ -0,0 +1,58 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.configs.fixtures import BaseConfigProject + + +class TestConfigIndivTests(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + "vars": { + "test": { + "seed_name": "seed", + } + }, + "data_tests": {"test": {"enabled": True, "severity": "WARN"}}, + } + + def test_configuring_individual_tests( + self, + project, + ): + assert len(run_dbt(["seed"])) == 1 + assert len(run_dbt(["run"])) == 2 + + # all tests on (minus sleeper_agent) + WARN + assert len(run_dbt(["test"])) == 5 + + # turn off two of them directly + assert len(run_dbt(["test", "--vars", '{"enabled_direct": False}'])) == 3 + + # turn on sleeper_agent data test directly + assert ( + len( + run_dbt( + ["test", "--models", "sleeper_agent", "--vars", '{"enabled_direct": True}'] + ) + ) + == 1 + ) + + # set three to ERROR directly + results = run_dbt( + [ + "test", + "--models", + "config.severity:error", + "--vars", + '{"enabled_direct": True, "severity_direct": "ERROR"}', + ], + expect_pass=False, + ) + assert len(results) == 2 + assert results[0].status == "fail" + assert results[1].status == "fail" diff --git a/tests/functional/configs/test_unused_configs.py b/tests/functional/configs/test_unused_configs.py new file mode 100644 index 000000000..a01ebc01c --- /dev/null +++ b/tests/functional/configs/test_unused_configs.py @@ -0,0 +1,52 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +seeds__seed_csv = """id,value +4,2 +""" + + +class TestUnusedModelConfigs: + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "test-paths": ["does-not-exist"], + "models": { + "test": { + "enabled": True, + } + }, + "seeds": { + "quote_columns": False, + }, + "sources": { + "test": { + "enabled": True, + } + }, + "data_tests": { + "test": { + "enabled": True, + } + }, + } + + def test_warn_unused_configuration_paths( + self, + project, + ): + with pytest.raises(CompilationError) as excinfo: + run_dbt(["--warn-error", "seed"]) + + assert "Configuration paths exist" in str(excinfo.value) + assert "- sources.test" in str(excinfo.value) + assert "- models.test" in str(excinfo.value) + assert "- models.test" in str(excinfo.value) + + run_dbt(["seed"]) diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py new file mode 100644 index 000000000..7620a954a --- /dev/null +++ b/tests/functional/conftest.py @@ -0,0 +1,11 @@ +import pytest + +from tests.functional.projects import dbt_integration + + +pytest_plugins = ["dbt.tests.fixtures.project"] + + +@pytest.fixture(scope="class") +def dbt_integration_project(): + return dbt_integration() diff --git a/tests/functional/context_methods/first_dependency.py b/tests/functional/context_methods/first_dependency.py new file mode 100644 index 000000000..8e1365be9 --- /dev/null +++ b/tests/functional/context_methods/first_dependency.py @@ -0,0 +1,95 @@ +from dbt.tests.fixtures.project import write_project_files +import pytest + + +first_dependency__dbt_project_yml = """ +name: 'first_dep' +version: '1.0' +config-version: 2 + +profile: 'default' + +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] + +require-dbt-version: '>=0.1.0' + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + +vars: + first_dep: + first_dep_global: 'first_dep_global_value_overridden' + test_config_root_override: 'configured_from_dependency' + test_config_package: 'configured_from_dependency' + +seeds: + quote_columns: True + +""" + +first_dependency__models__nested__first_dep_model_sql = """ +select + '{{ var("first_dep_global") }}' as first_dep_global, + '{{ var("from_root_to_first") }}' as from_root +""" + +first_dependency__seeds__first_dep_expected_csv = """first_dep_global,from_root +first_dep_global_value_overridden,root_first_value +""" + +first_dependency__models__nested__first_dep_model_var_expected_csv = """test_config_root_override,test_config_package +configured_from_root,configured_from_dependency +""" + +first_dependency__models__nested__first_dep_model_var_sql = """ +select + '{{ config.get("test_config_root_override") }}' as test_config_root_override, + '{{ config.get("test_config_package") }}' as test_config_package +""" + +first_dependency__model_var_in_config_schema = """ +models: +- name: first_dep_model + config: + test_config_root_override: "{{ var('test_config_root_override') }}" + test_config_package: "{{ var('test_config_package') }}" +""" + + +class FirstDependencyProject: + @pytest.fixture(scope="class") + def first_dependency(self, project): + first_dependency_files = { + "dbt_project.yml": first_dependency__dbt_project_yml, + "models": { + "nested": { + "first_dep_model.sql": first_dependency__models__nested__first_dep_model_sql + } + }, + "seeds": {"first_dep_expected.csv": first_dependency__seeds__first_dep_expected_csv}, + } + write_project_files(project.project_root, "first_dependency", first_dependency_files) + + +class FirstDependencyConfigProject: + @pytest.fixture(scope="class") + def first_dependency(self, project): + first_dependency_files = { + "dbt_project.yml": first_dependency__dbt_project_yml, + "models": { + "nested": { + "first_dep_model.sql": first_dependency__models__nested__first_dep_model_var_sql, + "schema.yml": first_dependency__model_var_in_config_schema, + } + }, + "seeds": { + "first_dep_expected.csv": first_dependency__models__nested__first_dep_model_var_expected_csv + }, + } + write_project_files(project.project_root, "first_dependency", first_dependency_files) diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py new file mode 100644 index 000000000..1bec64c9e --- /dev/null +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -0,0 +1,163 @@ +import json +import os + +from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file +from dbt_common.exceptions import CompilationError +import pytest + + +macros__validate_set_sql = """ +{% macro validate_set() %} + {% set set_result = set([1, 2, 2, 3, 'foo', False]) %} + {{ log("set_result: " ~ set_result) }} + {% set set_strict_result = set_strict([1, 2, 2, 3, 'foo', False]) %} + {{ log("set_strict_result: " ~ set_strict_result) }} +{% endmacro %} +""" + +macros__validate_zip_sql = """ +{% macro validate_zip() %} + {% set list_a = [1, 2] %} + {% set list_b = ['foo', 'bar'] %} + {% set zip_result = zip(list_a, list_b) | list %} + {{ log("zip_result: " ~ zip_result) }} + {% set zip_strict_result = zip_strict(list_a, list_b) | list %} + {{ log("zip_strict_result: " ~ zip_strict_result) }} +{% endmacro %} +""" + +macros__validate_invocation_sql = """ +{% macro validate_invocation(my_variable) %} + -- check a specific value + {{ log("use_colors: "~ invocation_args_dict['use_colors']) }} + -- whole dictionary (as string) + {{ log("invocation_result: "~ invocation_args_dict) }} +{% endmacro %} +""" + +macros__validate_dbt_metadata_envs_sql = """ +{% macro validate_dbt_metadata_envs() %} + {{ log("dbt_metadata_envs_result:"~ dbt_metadata_envs) }} +{% endmacro %} +""" + +models__set_exception_sql = """ +{% set set_strict_result = set_strict(1) %} +""" + +models__zip_exception_sql = """ +{% set zip_strict_result = zip_strict(1) %} +""" + + +def parse_json_logs(json_log_output): + parsed_logs = [] + for line in json_log_output.split("\n"): + try: + log = json.loads(line) + except ValueError: + continue + + parsed_logs.append(log) + + return parsed_logs + + +def find_result_in_parsed_logs(parsed_logs, result_name): + return next( + ( + item["data"]["msg"] + for item in parsed_logs + if result_name in item["data"].get("msg", "msg") + ), + False, + ) + + +class TestContextBuiltins: + @pytest.fixture(scope="class") + def macros(self): + return { + "validate_set.sql": macros__validate_set_sql, + "validate_zip.sql": macros__validate_zip_sql, + "validate_invocation.sql": macros__validate_invocation_sql, + "validate_dbt_metadata_envs.sql": macros__validate_dbt_metadata_envs_sql, + } + + def test_builtin_set_function(self, project): + _, log_output = run_dbt_and_capture(["--debug", "run-operation", "validate_set"]) + + # The order of the set isn't guaranteed so we can't check for the actual set in the logs + assert "set_result: " in log_output + assert "False" in log_output + assert "set_strict_result: " in log_output + + def test_builtin_zip_function(self, project): + _, log_output = run_dbt_and_capture(["--debug", "run-operation", "validate_zip"]) + + expected_zip = [(1, "foo"), (2, "bar")] + assert f"zip_result: {expected_zip}" in log_output + assert f"zip_strict_result: {expected_zip}" in log_output + + def test_builtin_invocation_args_dict_function(self, project): + _, log_output = run_dbt_and_capture( + [ + "--debug", + "--log-format=json", + "run-operation", + "validate_invocation", + "--args", + "{my_variable: test_variable}", + ] + ) + + parsed_logs = parse_json_logs(log_output) + use_colors = result = find_result_in_parsed_logs(parsed_logs, "use_colors") + assert use_colors == "use_colors: True" + invocation_dict = find_result_in_parsed_logs(parsed_logs, "invocation_result") + assert result + # The result should include a dictionary of all flags with values that aren't None + expected = ( + "'send_anonymous_usage_stats': False", + "'quiet': False", + "'print': True", + "'cache_selected_only': False", + "'macro': 'validate_invocation'", + "'args': {'my_variable': 'test_variable'}", + "'which': 'run-operation'", + "'indirect_selection': 'eager'", + ) + assert all(element in invocation_dict for element in expected) + + def test_builtin_dbt_metadata_envs_function(self, project, monkeypatch): + envs = { + "DBT_ENV_CUSTOM_ENV_RUN_ID": "1234", + "DBT_ENV_CUSTOM_ENV_JOB_ID": "5678", + "DBT_ENV_RUN_ID": "91011", + "RANDOM_ENV": "121314", + } + monkeypatch.setattr(os, "environ", envs) + + _, log_output = run_dbt_and_capture( + ["--debug", "--log-format=json", "run-operation", "validate_dbt_metadata_envs"] + ) + + parsed_logs = parse_json_logs(log_output) + result = find_result_in_parsed_logs(parsed_logs, "dbt_metadata_envs_result") + + assert result + + expected = "dbt_metadata_envs_result:{'RUN_ID': '1234', 'JOB_ID': '5678'}" + assert expected in str(result) + + +class TestContextBuiltinExceptions: + # Assert compilation errors are raised with _strict equivalents + def test_builtin_function_exception(self, project): + write_file(models__set_exception_sql, project.project_root, "models", "raise.sql") + with pytest.raises(CompilationError): + run_dbt(["compile"]) + + write_file(models__zip_exception_sql, project.project_root, "models", "raise.sql") + with pytest.raises(CompilationError): + run_dbt(["compile"]) diff --git a/tests/functional/context_methods/test_cli_var_override.py b/tests/functional/context_methods/test_cli_var_override.py new file mode 100644 index 000000000..d47c33f6e --- /dev/null +++ b/tests/functional/context_methods/test_cli_var_override.py @@ -0,0 +1,67 @@ +from dbt.tests.util import run_dbt +import pytest + + +models_override__schema_yml = """ +version: 2 +models: +- name: test_vars + columns: + - name: field + data_tests: + - accepted_values: + values: + - override +""" + +models_override__test_vars_sql = """ +select '{{ var("required") }}'::varchar as field +""" + + +# Tests that cli vars override vars set in the project config +class TestCLIVarOverride: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_override__schema_yml, + "test_vars.sql": models_override__test_vars_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "vars": { + "required": "present", + }, + } + + def test__override_vars_global(self, project): + run_dbt(["run", "--vars", "{required: override}"]) + run_dbt(["test"]) + + +# This one switches to setting a var in 'test' +class TestCLIVarOverridePorject: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_override__schema_yml, + "test_vars.sql": models_override__test_vars_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "vars": { + "test": { + "required": "present", + }, + }, + } + + def test__override_vars_project_level(self, project): + + # This should be "override" + run_dbt(["run", "--vars", "{required: override}"]) + run_dbt(["test"]) diff --git a/tests/functional/context_methods/test_cli_vars.py b/tests/functional/context_methods/test_cli_vars.py new file mode 100644 index 000000000..8f6d6e8d5 --- /dev/null +++ b/tests/functional/context_methods/test_cli_vars.py @@ -0,0 +1,205 @@ +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_artifact, run_dbt, write_config_file +from dbt_common.exceptions import CompilationError, DbtRuntimeError +import pytest +import yaml + + +models_complex__schema_yml = """ +version: 2 +models: +- name: complex_model + columns: + - name: var_1 + data_tests: + - accepted_values: + values: + - abc + - name: var_2 + data_tests: + - accepted_values: + values: + - def + - name: var_3 + data_tests: + - accepted_values: + values: + - jkl +""" + +models_complex__complex_model_sql = """ +select + '{{ var("variable_1") }}'::varchar as var_1, + '{{ var("variable_2")[0] }}'::varchar as var_2, + '{{ var("variable_3")["value"] }}'::varchar as var_3 +""" + +models_simple__schema_yml = """ +version: 2 +models: +- name: simple_model + columns: + - name: simple + data_tests: + - accepted_values: + values: + - abc +""" + +models_simple__simple_model_sql = """ +select + '{{ var("simple") }}'::varchar as simple +""" + +really_simple_model_sql = """ +select 'abc' as simple +""" + + +class TestCLIVars: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_complex__schema_yml, + "complex_model.sql": models_complex__complex_model_sql, + } + + def test__cli_vars_longform(self, project): + cli_vars = { + "variable_1": "abc", + "variable_2": ["def", "ghi"], + "variable_3": {"value": "jkl"}, + } + results = run_dbt(["run", "--vars", yaml.dump(cli_vars)]) + assert len(results) == 1 + results = run_dbt(["test", "--vars", yaml.dump(cli_vars)]) + assert len(results) == 3 + + +class TestCLIVarsSimple: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_simple__schema_yml, + "simple_model.sql": models_simple__simple_model_sql, + } + + def test__cli_vars_shorthand(self, project): + results = run_dbt(["run", "--vars", "simple: abc"]) + assert len(results) == 1 + results = run_dbt(["test", "--vars", "simple: abc"]) + assert len(results) == 1 + + def test__cli_vars_longer(self, project): + results = run_dbt(["run", "--vars", "{simple: abc, unused: def}"]) + assert len(results) == 1 + results = run_dbt(["test", "--vars", "{simple: abc, unused: def}"]) + assert len(results) == 1 + run_results = get_artifact(project.project_root, "target", "run_results.json") + assert run_results["args"]["vars"] == {"simple": "abc", "unused": "def"} + + +class TestCLIVarsProfile: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_simple__schema_yml, + "simple_model.sql": really_simple_model_sql, + } + + def test_cli_vars_in_profile(self, project, dbt_profile_data): + profile = dbt_profile_data + profile["test"]["outputs"]["default"]["host"] = "{{ var('db_host') }}" + write_config_file(profile, project.profiles_dir, "profiles.yml") + with pytest.raises(DbtRuntimeError): + results = run_dbt(["run"]) + results = run_dbt(["run", "--vars", "db_host: localhost"]) + assert len(results) == 1 + + +class TestCLIVarsPackages: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, dbt_integration_project): # noqa: F811 + write_project_files(project_root, "dbt_integration_project", dbt_integration_project) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_simple__schema_yml, + "simple_model.sql": really_simple_model_sql, + } + + @pytest.fixture(scope="class") + def packages_config(self): + return {"packages": [{"local": "dbt_integration_project"}]} + + def test_cli_vars_in_packages(self, project, packages_config): + # Run working deps and run commands + run_dbt(["deps"]) + results = run_dbt(["run"]) + assert len(results) == 1 + + # Change packages.yml to contain a var + packages = packages_config + packages["packages"][0]["local"] = "{{ var('path_to_project') }}" + write_config_file(packages, project.project_root, "packages.yml") + + # Without vars args deps fails + with pytest.raises(DbtRuntimeError): + run_dbt(["deps"]) + + # With vars arg deps succeeds + results = run_dbt(["deps", "--vars", "path_to_project: dbt_integration_project"]) + assert results is None + + +initial_selectors_yml = """ +selectors: + - name: dev_defer_snapshots + default: "{{ target.name == 'dev' | as_bool }}" + definition: + method: fqn + value: '*' + exclude: + - method: config.materialized + value: snapshot +""" + +var_selectors_yml = """ +selectors: + - name: dev_defer_snapshots + default: "{{ var('snapshot_target') == 'dev' | as_bool }}" + definition: + method: fqn + value: '*' + exclude: + - method: config.materialized + value: snapshot +""" + + +class TestCLIVarsSelectors: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_simple__schema_yml, + "simple_model.sql": really_simple_model_sql, + } + + @pytest.fixture(scope="class") + def selectors(self): + return initial_selectors_yml + + def test_vars_in_selectors(self, project): + # initially runs ok + results = run_dbt(["run"]) + assert len(results) == 1 + + # Update the selectors.yml file to have a var + write_config_file(var_selectors_yml, project.project_root, "selectors.yml") + with pytest.raises(CompilationError): + run_dbt(["run"]) + + # Var in cli_vars works + results = run_dbt(["run", "--vars", "snapshot_target: dev"]) + assert len(results) == 1 diff --git a/tests/functional/context_methods/test_custom_env_vars.py b/tests/functional/context_methods/test_custom_env_vars.py new file mode 100644 index 000000000..93de2b664 --- /dev/null +++ b/tests/functional/context_methods/test_custom_env_vars.py @@ -0,0 +1,35 @@ +import json +import os + +from dbt.tests.util import run_dbt_and_capture +import pytest + + +def parse_json_logs(json_log_output): + parsed_logs = [] + for line in json_log_output.split("\n"): + try: + log = json.loads(line) + except ValueError: + continue + + parsed_logs.append(log) + + return parsed_logs + + +class TestCustomVarInLogs: + @pytest.fixture(scope="class", autouse=True) + def setup(self): + # on windows, python uppercases env var names because windows is case insensitive + os.environ["DBT_ENV_CUSTOM_ENV_SOME_VAR"] = "value" + yield + del os.environ["DBT_ENV_CUSTOM_ENV_SOME_VAR"] + + def test_extra_filled(self, project): + _, log_output = run_dbt_and_capture( + ["--log-format=json", "deps"], + ) + logs = parse_json_logs(log_output) + for log in logs: + assert log["info"].get("extra") == {"SOME_VAR": "value"} diff --git a/tests/functional/context_methods/test_env_vars.py b/tests/functional/context_methods/test_env_vars.py new file mode 100644 index 000000000..e852199d8 --- /dev/null +++ b/tests/functional/context_methods/test_env_vars.py @@ -0,0 +1,193 @@ +import os + +from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_ENV_PREFIX +from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture +import pytest + + +context_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select + + -- compile-time variables + '{{ this }}' as "this", + '{{ this.name }}' as "this.name", + '{{ this.schema }}' as "this.schema", + '{{ this.table }}' as "this.table", + + '{{ target.dbname }}' as "target.dbname", + '{{ target.host }}' as "target.host", + '{{ target.name }}' as "target.name", + '{{ target.schema }}' as "target.schema", + '{{ target.type }}' as "target.type", + '{{ target.user }}' as "target.user", + '{{ target.get("pass", "") }}' as "target.pass", -- not actually included, here to test that it is _not_ present! + {{ target.port }} as "target.port", + {{ target.threads }} as "target.threads", + + -- runtime variables + '{{ run_started_at }}' as run_started_at, + '{{ invocation_id }}' as invocation_id, + '{{ thread_id }}' as thread_id, + + '{{ env_var("DBT_TEST_ENV_VAR") }}' as env_var, + '{{ env_var("DBT_TEST_IGNORE_DEFAULT", "ignored_default_val") }}' as env_var_ignore_default, + '{{ env_var("DBT_TEST_USE_DEFAULT", "use_my_default_val") }}' as env_var_use_default, + 'secret_variable' as env_var_secret, -- make sure the value itself is scrubbed from the logs + '{{ env_var("DBT_TEST_NOT_SECRET") }}' as env_var_not_secret + +""" + + +class TestEnvVars: + @pytest.fixture(scope="class") + def models(self): + return {"context.sql": context_sql} + + @pytest.fixture(scope="class", autouse=True) + def setup(self): + os.environ["DBT_TEST_ENV_VAR"] = "1" + os.environ["DBT_TEST_USER"] = "root" + os.environ["DBT_TEST_PASS"] = "password" + os.environ[SECRET_ENV_PREFIX + "SECRET"] = "secret_variable" + os.environ["DBT_TEST_NOT_SECRET"] = "regular_variable" + os.environ["DBT_TEST_IGNORE_DEFAULT"] = "ignored_default" + yield + del os.environ["DBT_TEST_ENV_VAR"] + del os.environ["DBT_TEST_USER"] + del os.environ[SECRET_ENV_PREFIX + "SECRET"] + del os.environ["DBT_TEST_NOT_SECRET"] + del os.environ["DBT_TEST_IGNORE_DEFAULT"] + + @pytest.fixture(scope="class") + def profiles_config_update(self, unique_schema): + return { + "test": { + "outputs": { + # don't use env_var's here so the integration tests can run + # seed sql statements and the like. default target is used + "dev": { + "type": "postgres", + "threads": 1, + "host": "localhost", + "port": 5432, + "user": "root", + "pass": "password", + "dbname": "dbt", + "schema": unique_schema, + }, + "prod": { + "type": "postgres", + "threads": 1, + "host": "localhost", + "port": 5432, + # root/password + "user": "{{ env_var('DBT_TEST_USER') }}", + "pass": "{{ env_var('DBT_TEST_PASS') }}", + "dbname": "dbt", + "schema": unique_schema, + }, + }, + "target": "dev", + } + } + + def get_ctx_vars(self, project): + fields = [ + "this", + "this.name", + "this.schema", + "this.table", + "target.dbname", + "target.host", + "target.name", + "target.port", + "target.schema", + "target.threads", + "target.type", + "target.user", + "target.pass", + "run_started_at", + "invocation_id", + "thread_id", + "env_var", + ] + field_list = ", ".join(['"{}"'.format(f) for f in fields]) + query = "select {field_list} from {schema}.context".format( + field_list=field_list, schema=project.test_schema + ) + vals = project.run_sql(query, fetch="all") + ctx = dict([(k, v) for (k, v) in zip(fields, vals[0])]) + return ctx + + def test_env_vars_dev( + self, + project, + ): + results = run_dbt(["run"]) + assert len(results) == 1 + ctx = self.get_ctx_vars(project) + + manifest = get_manifest(project.project_root) + expected = { + "DBT_TEST_ENV_VAR": "1", + "DBT_TEST_NOT_SECRET": "regular_variable", + "DBT_TEST_IGNORE_DEFAULT": "ignored_default", + "DBT_TEST_USE_DEFAULT": DEFAULT_ENV_PLACEHOLDER, + } + assert manifest.env_vars == expected + + this = '"{}"."{}"."context"'.format(project.database, project.test_schema) + assert ctx["this"] == this + + assert ctx["this.name"] == "context" + assert ctx["this.schema"] == project.test_schema + assert ctx["this.table"] == "context" + + assert ctx["target.dbname"] == "dbt" + assert ctx["target.host"] == "localhost" + assert ctx["target.name"] == "dev" + assert ctx["target.port"] == 5432 + assert ctx["target.schema"] == project.test_schema + assert ctx["target.threads"] == 1 + assert ctx["target.type"] == "postgres" + assert ctx["target.user"] == "root" + assert ctx["target.pass"] == "" + + assert ctx["env_var"] == "1" + + def test_env_vars_prod(self, project): + results = run_dbt(["run", "--target", "prod"]) + assert len(results) == 1 + ctx = self.get_ctx_vars(project) + + this = '"{}"."{}"."context"'.format(project.database, project.test_schema) + assert ctx["this"] == this + + assert ctx["this.name"] == "context" + assert ctx["this.schema"] == project.test_schema + assert ctx["this.table"] == "context" + + assert ctx["target.dbname"] == "dbt" + assert ctx["target.host"] == "localhost" + assert ctx["target.name"] == "prod" + assert ctx["target.port"] == 5432 + assert ctx["target.schema"] == project.test_schema + assert ctx["target.threads"] == 1 + assert ctx["target.type"] == "postgres" + assert ctx["target.user"] == "root" + assert ctx["target.pass"] == "" + assert ctx["env_var"] == "1" + + def test_env_vars_secrets(self, project): + os.environ["DBT_DEBUG"] = "True" + _, log_output = run_dbt_and_capture(["run", "--target", "prod"]) + + assert not ("secret_variable" in log_output) + assert "regular_variable" in log_output diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py new file mode 100644 index 000000000..b4949d72a --- /dev/null +++ b/tests/functional/context_methods/test_secret_env_vars.py @@ -0,0 +1,184 @@ +import os + +from dbt.constants import SECRET_ENV_PREFIX +from dbt.exceptions import ParsingError +from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtInternalError +import pytest + +from first_dependency import FirstDependencyProject + + +secret_bad__context_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select + + '{{ env_var("DBT_TEST_ENV_VAR") }}' as env_var, + '{{ env_var("DBT_ENV_SECRET_SECRET") }}' as env_var_secret, -- this should raise an error! + '{{ env_var("DBT_TEST_NOT_SECRET") }}' as env_var_not_secret + +""" + + +class TestDisallowSecretModel: + @pytest.fixture(scope="class") + def models(self): + return {"context.sql": secret_bad__context_sql} + + def test_disallow_secret(self, project): + with pytest.raises(ParsingError): + run_dbt(["compile"]) + + +models__context_sql = """ +{{ + config( + materialized='table' + ) +}} + +select + + -- compile-time variables + '{{ this }}' as "this", + '{{ this.name }}' as "this.name", + '{{ this.schema }}' as "this.schema", + '{{ this.table }}' as "this.table", + + '{{ target.dbname }}' as "target.dbname", + '{{ target.host }}' as "target.host", + '{{ target.name }}' as "target.name", + '{{ target.schema }}' as "target.schema", + '{{ target.type }}' as "target.type", + '{{ target.user }}' as "target.user", + '{{ target.get("pass", "") }}' as "target.pass", -- not actually included, here to test that it is _not_ present! + {{ target.port }} as "target.port", + {{ target.threads }} as "target.threads", + + -- runtime variables + '{{ run_started_at }}' as run_started_at, + '{{ invocation_id }}' as invocation_id, + '{{ thread_id }}' as thread_id, + + '{{ env_var("DBT_TEST_ENV_VAR") }}' as env_var, + 'secret_variable' as env_var_secret, -- make sure the value itself is scrubbed from the logs + '{{ env_var("DBT_TEST_NOT_SECRET") }}' as env_var_not_secret +""" + + +class TestAllowSecretProfilePackage(FirstDependencyProject): + @pytest.fixture(scope="class", autouse=True) + def setup(self): + os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "PASS"] = "password" + os.environ[SECRET_ENV_PREFIX + "PACKAGE"] = "first_dependency" + os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" + yield + del os.environ[SECRET_ENV_PREFIX + "USER"] + del os.environ[SECRET_ENV_PREFIX + "PASS"] + del os.environ[SECRET_ENV_PREFIX + "PACKAGE"] + del os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] + + @pytest.fixture(scope="class") + def models(self): + return {"context.sql": models__context_sql} + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + # the raw value of this secret *will* be written to lock file + "local": "{{ env_var('DBT_ENV_SECRET_PACKAGE') }}" + }, + { + # this secret env var will *not* be written to lock file + "git": "https://{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/dbt-external-tables.git" + }, + { + # this secret env var will *not* be written to lock file + "tarball": "https://{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/dbt-utils/archive/refs/tags/1.1.1.tar.gz", + "name": "dbt_utils", + }, + ] + } + + @pytest.fixture(scope="class") + def profile_target(self): + return { + "type": "postgres", + "threads": 1, + "host": "localhost", + "port": 5432, + # root/password + "user": "{{ env_var('DBT_ENV_SECRET_USER') }}", + "pass": "{{ env_var('DBT_ENV_SECRET_PASS') }}", + "dbname": "dbt", + } + + def test_allow_secrets(self, project, first_dependency): + _, log_output = run_dbt_and_capture(["deps"]) + lock_file_contents = read_file("package-lock.yml") + + # this will not be written to logs or lock file + assert not ("abc123" in log_output) + assert not ("abc123" in lock_file_contents) + assert "{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}" in lock_file_contents + + # this will be scrubbed from logs, but not from the lock file + assert not ("first_dependency" in log_output) + assert "first_dependency" in lock_file_contents + + +class TestCloneFailSecretScrubbed: + @pytest.fixture(scope="class", autouse=True) + def setup(self): + os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" + + @pytest.fixture(scope="class") + def models(self): + return {"context.sql": models__context_sql} + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://fakeuser:{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/fake-repo.git" + }, + ] + } + + def test_fail_clone_with_scrubbing(self, project): + with pytest.raises(DbtInternalError) as excinfo: + _, log_output = run_dbt_and_capture(["deps"]) + + assert "abc123" not in str(excinfo.value) + + +class TestCloneFailSecretNotRendered(TestCloneFailSecretScrubbed): + # as above, with some Jinja manipulation + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://fakeuser:{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') | join(' ') }}@github.com/dbt-labs/fake-repo.git" + }, + ] + } + + def test_fail_clone_with_scrubbing(self, project): + with pytest.raises(DbtInternalError) as excinfo: + _, log_output = run_dbt_and_capture(["deps"]) + + # we should not see any manipulated form of the secret value (abc123) here + # we should see a manipulated form of the placeholder instead + assert "a b c 1 2 3" not in str(excinfo.value) + assert "D B T _ E N V _ S E C R E T _ G I T _ T O K E N" in str(excinfo.value) diff --git a/tests/functional/context_methods/test_var_dependency.py b/tests/functional/context_methods/test_var_dependency.py new file mode 100644 index 000000000..e6c1a501c --- /dev/null +++ b/tests/functional/context_methods/test_var_dependency.py @@ -0,0 +1,82 @@ +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + +from first_dependency import ( + FirstDependencyConfigProject, + FirstDependencyProject, +) + + +dependency_seeds__root_model_expected_csv = """first_dep_global,from_root +dep_never_overridden,root_root_value +""" + +dependency_models__inside__model_sql = """ +select + '{{ var("first_dep_override") }}' as first_dep_global, + '{{ var("from_root_to_root") }}' as from_root + +""" + + +class TestVarDependencyInheritance(FirstDependencyProject): + @pytest.fixture(scope="class") + def seeds(self): + return {"root_model_expected.csv": dependency_seeds__root_model_expected_csv} + + @pytest.fixture(scope="class") + def models(self): + return {"inside": {"model.sql": dependency_models__inside__model_sql}} + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + {"local": "first_dependency"}, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "vars": { + "first_dep_override": "dep_never_overridden", + "test": { + "from_root_to_root": "root_root_value", + }, + "first_dep": { + "from_root_to_first": "root_first_value", + }, + }, + } + + def test_var_mutual_overrides_v1_conversion(self, project, first_dependency): + run_dbt(["deps"]) + assert len(run_dbt(["seed"])) == 2 + assert len(run_dbt(["run"])) == 2 + check_relations_equal(project.adapter, ["root_model_expected", "model"]) + check_relations_equal(project.adapter, ["first_dep_expected", "first_dep_model"]) + + +class TestVarConfigDependencyInheritance(FirstDependencyConfigProject): + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + {"local": "first_dependency"}, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "vars": { + "test_config_root_override": "configured_from_root", + }, + } + + def test_root_var_overrides_package_var(self, project, first_dependency): + run_dbt(["deps"]) + run_dbt(["seed"]) + assert len(run_dbt(["run"])) == 1 + check_relations_equal(project.adapter, ["first_dep_expected", "first_dep_model"]) diff --git a/tests/functional/context_methods/test_var_in_generate_name.py b/tests/functional/context_methods/test_var_in_generate_name.py new file mode 100644 index 000000000..f36bec3a8 --- /dev/null +++ b/tests/functional/context_methods/test_var_in_generate_name.py @@ -0,0 +1,43 @@ +from dbt.tests.util import run_dbt, update_config_file +from dbt_common.exceptions import CompilationError +import pytest + + +model_sql = """ +select 1 as id +""" + +bad_generate_macros__generate_names_sql = """ +{% macro generate_schema_name(custom_schema_name, node) -%} + {% do var('somevar') %} + {% do return(dbt.generate_schema_name(custom_schema_name, node)) %} +{%- endmacro %} + +""" + + +class TestMissingVarGenerateNameMacro: + @pytest.fixture(scope="class") + def macros(self): + return {"generate_names.sql": bad_generate_macros__generate_names_sql} + + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + def test_generate_schema_name_var(self, project): + # var isn't set, so generate_name macro fails + with pytest.raises(CompilationError) as excinfo: + run_dbt(["compile"]) + + assert "Required var 'somevar' not found in config" in str(excinfo.value) + + # globally scoped -- var is set at top-level + update_config_file({"vars": {"somevar": 1}}, project.project_root, "dbt_project.yml") + run_dbt(["compile"]) + + # locally scoped -- var is set in 'test' scope + update_config_file( + {"vars": {"test": {"somevar": 1}}}, project.project_root, "dbt_project.yml" + ) + run_dbt(["compile"]) diff --git a/tests/functional/context_methods/test_yaml_functions.py b/tests/functional/context_methods/test_yaml_functions.py new file mode 100644 index 000000000..8996abc93 --- /dev/null +++ b/tests/functional/context_methods/test_yaml_functions.py @@ -0,0 +1,49 @@ +from dbt.tests.util import run_dbt +import pytest + + +tests__from_yaml_sql = """ +{% set simplest = (fromyaml('a: 1') == {'a': 1}) %} +{% set nested_data %} +a: + b: + - c: 1 + d: 2 + - c: 3 + d: 4 +{% endset %} +{% set nested = (fromyaml(nested_data) == {'a': {'b': [{'c': 1, 'd': 2}, {'c': 3, 'd': 4}]}}) %} + +(select 'simplest' as name {% if simplest %}limit 0{% endif %}) +union all +(select 'nested' as name {% if nested %}limit 0{% endif %}) +""" + +tests__to_yaml_sql = """ +{% set simplest = (toyaml({'a': 1}) == 'a: 1\\n') %} +{% set default_sort = (toyaml({'b': 2, 'a': 1}) == 'b: 2\\na: 1\\n') %} +{% set unsorted = (toyaml({'b': 2, 'a': 1}, sort_keys=False) == 'b: 2\\na: 1\\n') %} +{% set sorted = (toyaml({'b': 2, 'a': 1}, sort_keys=True) == 'a: 1\\nb: 2\\n') %} +{% set default_results = (toyaml({'a': adapter}, 'failed') == 'failed') %} + +(select 'simplest' as name {% if simplest %}limit 0{% endif %}) +union all +(select 'default_sort' as name {% if default_sort %}limit 0{% endif %}) +union all +(select 'unsorted' as name {% if unsorted %}limit 0{% endif %}) +union all +(select 'sorted' as name {% if sorted %}limit 0{% endif %}) +union all +(select 'default_results' as name {% if default_results %}limit 0{% endif %}) +""" + + +class TestContextVars: + # This test has no actual models + + @pytest.fixture(scope="class") + def tests(self): + return {"from_yaml.sql": tests__from_yaml_sql, "to_yaml.sql": tests__to_yaml_sql} + + def test_json_data_tests(self, project): + assert len(run_dbt(["test"])) == 2 diff --git a/tests/functional/contracts/test_contract_enforcement.py b/tests/functional/contracts/test_contract_enforcement.py new file mode 100644 index 000000000..79b5fd1d5 --- /dev/null +++ b/tests/functional/contracts/test_contract_enforcement.py @@ -0,0 +1,44 @@ +from dbt.tests.util import run_dbt, write_file +import pytest + + +my_model_sql = """ +select 'some string' as string_column +""" + +my_model_int_sql = """ +select 123 as int_column +""" + +model_schema_yml = """ +models: + - name: my_model + config: + materialized: incremental + on_schema_change: append_new_columns + contract: {enforced: true} + columns: + - name: string_column + data_type: text +""" + + +class TestIncrementalModelContractEnforcement: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "schema.yml": model_schema_yml, + } + + def test_contracted_incremental(self, project): + results = run_dbt() + assert len(results) == 1 + # now update the column type in the model to break the contract + write_file(my_model_int_sql, project.project_root, "models", "my_model.sql") + + expected_msg = "This model has an enforced contract that failed." + results = run_dbt(expect_pass=False) + assert len(results) == 1 + msg = results[0].message + assert expected_msg in msg diff --git a/tests/functional/contracts/test_contract_precision.py b/tests/functional/contracts/test_contract_precision.py new file mode 100644 index 000000000..d1c71ba18 --- /dev/null +++ b/tests/functional/contracts/test_contract_precision.py @@ -0,0 +1,63 @@ +from dbt.tests.util import run_dbt_and_capture +import pytest + + +my_numeric_model_sql = """ +select + 1.234 as non_integer +""" + +model_schema_numerics_yml = """ +version: 2 +models: + - name: my_numeric_model + config: + contract: + enforced: true + columns: + - name: non_integer + data_type: numeric +""" + +model_schema_numerics_precision_yml = """ +version: 2 +models: + - name: my_numeric_model + config: + contract: + enforced: true + columns: + - name: non_integer + data_type: numeric(38,3) +""" + + +class TestModelContractNumericNoPrecision: + @pytest.fixture(scope="class") + def models(self): + return { + "my_numeric_model.sql": my_numeric_model_sql, + "schema.yml": model_schema_numerics_yml, + } + + def test_contracted_numeric_without_precision(self, project): + expected_msg = "Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: ['non_integer']" + _, logs = run_dbt_and_capture(["run"], expect_pass=True) + assert expected_msg in logs + _, logs = run_dbt_and_capture(["--warn-error", "run"], expect_pass=False) + assert "Compilation Error in model my_numeric_model" in logs + assert expected_msg in logs + + +class TestModelContractNumericPrecision: + @pytest.fixture(scope="class") + def models(self): + return { + "my_numeric_model.sql": my_numeric_model_sql, + "schema.yml": model_schema_numerics_precision_yml, + } + + def test_contracted_numeric_with_precision(self, project): + expected_msg = "Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: ['non_integer']" + _, logs = run_dbt_and_capture(["run"], expect_pass=True) + assert expected_msg not in logs diff --git a/tests/functional/contracts/test_nonstandard_data_type.py b/tests/functional/contracts/test_nonstandard_data_type.py new file mode 100644 index 000000000..4233747fb --- /dev/null +++ b/tests/functional/contracts/test_nonstandard_data_type.py @@ -0,0 +1,76 @@ +from dbt.tests.util import run_dbt, run_dbt_and_capture +import pytest + + +my_numeric_model_sql = """ +select + 12.34 as price +""" + +my_money_model_sql = """ +select + cast('12.34' as money) as price +""" + +model_schema_money_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: price + data_type: money +""" + +model_schema_numeric_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: price + data_type: numeric +""" + + +class TestModelContractUnrecognizedTypeCode1: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_money_model_sql, + "schema.yml": model_schema_money_yml, + } + + def test_nonstandard_data_type(self, project): + run_dbt(["run"], expect_pass=True) + + +class TestModelContractUnrecognizedTypeCodeActualMismatch: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_money_model_sql, + "schema.yml": model_schema_numeric_yml, + } + + def test_nonstandard_data_type(self, project): + expected_msg = "unknown type_code 790 | DECIMAL | data type mismatch" + _, logs = run_dbt_and_capture(["run"], expect_pass=False) + assert expected_msg in logs + + +class TestModelContractUnrecognizedTypeCodeExpectedMismatch: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_numeric_model_sql, + "schema.yml": model_schema_money_yml, + } + + def test_nonstandard_data_type(self, project): + expected_msg = "DECIMAL | unknown type_code 790 | data type mismatch" + _, logs = run_dbt_and_capture(["run"], expect_pass=False) + print(logs) + assert expected_msg in logs diff --git a/tests/functional/custom_aliases/fixtures.py b/tests/functional/custom_aliases/fixtures.py new file mode 100644 index 000000000..dfa4d7a54 --- /dev/null +++ b/tests/functional/custom_aliases/fixtures.py @@ -0,0 +1,68 @@ +model1_sql = """ +{{ config(materialized='table', alias='alias') }} + +select {{ string_literal(this.name) }} as model_name +""" + +model2_sql = """ +{{ config(materialized='table') }} + +select {{ string_literal(this.name) }} as model_name +""" + +macros_sql = """ +{% macro generate_alias_name(custom_alias_name, node) -%} + {%- if custom_alias_name is none -%} + {{ node.name }} + {%- else -%} + custom_{{ custom_alias_name | trim }} + {%- endif -%} +{%- endmacro %} + + +{% macro string_literal(s) -%} + {{ adapter.dispatch('string_literal', macro_namespace='test')(s) }} +{%- endmacro %} + +{% macro default__string_literal(s) %} + '{{ s }}'::text +{% endmacro %} +""" + +macros_config_sql = """ +{#-- Verify that the config['alias'] key is present #} +{% macro generate_alias_name(custom_alias_name, node) -%} + {%- if custom_alias_name is none -%} + {{ node.name }} + {%- else -%} + custom_{{ node.config['alias'] if 'alias' in node.config else '' | trim }} + {%- endif -%} +{%- endmacro %} + +{% macro string_literal(s) -%} + {{ adapter.dispatch('string_literal', macro_namespace='test')(s) }} +{%- endmacro %} + +{% macro default__string_literal(s) %} + '{{ s }}'::text +{% endmacro %} +""" + +schema_yml = """ +version: 2 + +models: + - name: model1 + columns: + - name: model_name + data_tests: + - accepted_values: + values: ['custom_alias'] + - name: model2 + columns: + - name: model_name + data_tests: + - accepted_values: + values: ['model2'] + +""" diff --git a/tests/functional/custom_aliases/test_custom_aliases.py b/tests/functional/custom_aliases/test_custom_aliases.py new file mode 100644 index 000000000..9c59be773 --- /dev/null +++ b/tests/functional/custom_aliases/test_custom_aliases.py @@ -0,0 +1,31 @@ +import pytest +from dbt.tests.util import run_dbt + +import fixtures + + +class TestAliases: + @pytest.fixture(scope="class") + def models(self): + return { + "model1.sql": fixtures.model1_sql, + "model2.sql": fixtures.model2_sql, + "schema.yml": fixtures.schema_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macros.sql": fixtures.macros_sql} + + def test_customer_alias_name(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + results = run_dbt(["test"]) + assert len(results) == 2 + + +class TestAliasesWithConfig(TestAliases): + @pytest.fixture(scope="class") + def macros(self): + return {"macros.sql": fixtures.macros_config_sql} diff --git a/tests/functional/custom_singular_tests/data/seed_expected.sql b/tests/functional/custom_singular_tests/data/seed_expected.sql new file mode 100644 index 000000000..445c087cd --- /dev/null +++ b/tests/functional/custom_singular_tests/data/seed_expected.sql @@ -0,0 +1,113 @@ +create table {schema}.seed ( + favorite_color VARCHAR(10), + id INTEGER, + first_name VARCHAR(11), + email VARCHAR(31), + ip_address VARCHAR(15), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +INSERT INTO {schema}.seed + (favorite_color, id, first_name, email, ip_address, updated_at) +VALUES + ('blue', 1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), + ('blue', 2,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), + ('blue', 3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), + ('blue', 4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), + ('blue', 5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), + ('blue', 6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), + ('blue', 7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), + ('blue', 8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), + ('blue', 9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), + ('blue', 10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), + ('blue', 11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), + ('blue', 12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), + ('blue', 13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), + ('blue', 14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), + ('blue', 15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), + ('blue', 16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), + ('blue', 17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), + ('blue', 18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), + ('blue', 19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), + ('blue', 20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), + ('blue', 21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), + ('blue', 22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), + ('blue', 23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), + ('blue', 24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), + ('blue', 25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), + ('blue', 26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), + ('blue', 27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), + ('blue', 28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), + ('blue', 29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), + ('blue', 30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), + ('blue', 31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), + ('blue', 32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), + ('blue', 33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), + ('blue', 34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), + ('blue', 35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), + ('blue', 36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), + ('blue', 37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), + ('blue', 38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), + ('blue', 39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), + ('blue', 40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), + ('blue', 41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), + ('blue', 42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), + ('blue', 43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), + ('blue', 44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), + ('blue', 45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), + ('blue', 46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), + ('blue', 47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), + ('blue', 48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), + ('blue', 49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), + ('blue', 50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), + ('green', 51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), + ('green', 52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), + ('green', 53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), + ('green', 54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), + ('green', 55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), + ('green', 56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), + ('green', 57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), + ('green', 58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), + ('green', 59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), + ('green', 60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), + ('green', 61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), + ('green', 62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), + ('green', 63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), + ('green', 64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), + ('green', 65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), + ('green', 66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), + ('green', 67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), + ('green', 68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), + ('green', 69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), + ('green', 70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), + ('green', 71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), + ('green', 72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), + ('green', 73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), + ('green', 74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), + ('green', 75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), + ('green', 76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), + ('green', 77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), + ('green', 78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), + ('green', 79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), + ('green', 80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), + ('green', 81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), + ('green', 82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), + ('green', 83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), + ('green', 84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), + ('green', 85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), + ('green', 86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), + ('green', 87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), + ('green', 88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), + ('green', 89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), + ('green', 90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), + ('green', 91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), + ('green', 92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), + ('green', 93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), + ('green', 94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), + ('green', 95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), + ('green', 96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), + ('green', 97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), + ('green', 98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), + ('green', 99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), + ('green', 100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'); diff --git a/tests/functional/custom_singular_tests/test_custom_singular_tests.py b/tests/functional/custom_singular_tests/test_custom_singular_tests.py new file mode 100644 index 000000000..1e902be52 --- /dev/null +++ b/tests/functional/custom_singular_tests/test_custom_singular_tests.py @@ -0,0 +1,110 @@ +from pathlib import Path + +from dbt.tests.util import run_dbt +import pytest + + +# from `test/integration/009_data_test` + +# +# Models +# + +models__table_copy = """ +{{ + config( + materialized='table' + ) +}} + +select * from {{ this.schema }}.seed +""" + +# +# Tests +# + +tests__fail_email_is_always_null = """ +select * +from {{ ref('table_copy') }} +where email is not null +""" + +tests__fail_no_ref = """ +select 1 +""" + +tests__dotted_path_pass_id_not_null = """ +{# Same as `pass_id_not_null` but with dots in its name #} + +select * +from {{ ref('table_copy') }} +where id is null +""" + +tests__pass_id_not_null = """ +select * +from {{ ref('table_copy') }} +where id is null +""" + +tests__pass_no_ref = """ +select 1 limit 0 +""" + + +class CustomSingularTestsBase(object): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + """Create seed and downstream model tests are to be run on""" + project.run_sql_file(project.test_data_dir / Path("seed_expected.sql")) + + results = run_dbt() + assert len(results) == 1 + + @pytest.fixture(scope="class") + def models(self): + return {"table_copy.sql": models__table_copy} + + +class TestPassingTests(CustomSingularTestsBase): + @pytest.fixture(scope="class") + def tests(self): + return { + "my_db.my_schema.table_copy.pass_id_not_null.sql": tests__dotted_path_pass_id_not_null, + "tests__pass_id_not_null.sql": tests__pass_id_not_null, + "tests__pass_no_ref.sql": tests__pass_no_ref, + } + + def test_data_tests(self, project, tests): + test_results = run_dbt(["test"]) + assert len(test_results) == len(tests) + + for result in test_results: + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0 + + +class TestFailingTests(CustomSingularTestsBase): + @pytest.fixture(scope="class") + def tests(self): + return { + "tests__fail_email_is_always_null.sql": tests__fail_email_is_always_null, + "tests__fail_no_ref.sql": tests__fail_no_ref, + } + + def test_data_tests(self, project, tests): + """assert that all deliberately failing tests actually fail""" + test_results = run_dbt(["test"], expect_pass=False) + assert len(test_results) == len(tests) + + for result in test_results: + assert result.status == "fail" + assert not result.skipped + assert result.failures > 0 + assert result.adapter_response == { + "_message": "SELECT 1", + "code": "SELECT", + "rows_affected": 1, + } diff --git a/tests/functional/dbt_runner.py b/tests/functional/dbt_runner.py new file mode 100644 index 000000000..01ebc8733 --- /dev/null +++ b/tests/functional/dbt_runner.py @@ -0,0 +1,43 @@ +import os +from typing import Callable, List, Optional + +from dbt.cli.main import dbtRunner, dbtRunnerResult +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import get_run_results +from dbt_common.events.base_types import EventMsg + + +def assert_run_results_have_compiled_node_attributes( + args: List[str], result: dbtRunnerResult +) -> None: + commands_with_run_results = ["build", "compile", "docs", "run", "test"] + if not [a for a in args if a in commands_with_run_results] or not result.success: + return + + run_results = get_run_results(os.getcwd()) + for r in run_results["results"]: + if r["unique_id"].startswith("model") and r["status"] == "success": + assert "compiled_code" in r + assert "compiled" in r + + +_STANDARD_ASSERTIONS = [assert_run_results_have_compiled_node_attributes] + + +class dbtTestRunner(dbtRunner): + def __init__( + self, + manifest: Optional[Manifest] = None, + callbacks: Optional[List[Callable[[EventMsg], None]]] = None, + exit_assertions: Optional[List[Callable[[List[str], dbtRunnerResult], None]]] = None, + ): + self.exit_assertions = exit_assertions if exit_assertions else _STANDARD_ASSERTIONS + super().__init__(manifest, callbacks) + + def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult: + result = super().invoke(args, **kwargs) + + for assertion in self.exit_assertions: + assertion(args, result) + + return result diff --git a/tests/functional/defer_state/fixtures.py b/tests/functional/defer_state/fixtures.py new file mode 100644 index 000000000..8b1d3d35b --- /dev/null +++ b/tests/functional/defer_state/fixtures.py @@ -0,0 +1,424 @@ +seed_csv = """id,name +1,Alice +2,Bob +""" + +table_model_sql = """ +{{ config(materialized='table') }} +select * from {{ ref('ephemeral_model') }} + +-- establish a macro dependency to trigger state:modified.macros +-- depends on: {{ my_macro() }} +""" + +table_model_now_view_sql = """ +{{ config(materialized='view') }} +select * from {{ ref('ephemeral_model') }} + +-- establish a macro dependency to trigger state:modified.macros +-- depends on: {{ my_macro() }} +""" + +table_model_now_incremental_sql = """ +{{ config(materialized='incremental', on_schema_change='append_new_columns') }} +select * from {{ ref('ephemeral_model') }} + +-- establish a macro dependency to trigger state:modified.macros +-- depends on: {{ my_macro() }} +""" + +changed_table_model_sql = """ +{{ config(materialized='table') }} +select 1 as fun +""" + +view_model_sql = """ +select * from {{ ref('seed') }} + +-- establish a macro dependency that trips infinite recursion if not handled +-- depends on: {{ my_infinitely_recursive_macro() }} +""" + +view_model_now_table_sql = """ +{{ config(materialized='table') }} +select * from {{ ref('seed') }} + +-- establish a macro dependency that trips infinite recursion if not handled +-- depends on: {{ my_infinitely_recursive_macro() }} +""" + +changed_view_model_sql = """ +select * from no.such.table +""" + +ephemeral_model_sql = """ +{{ config(materialized='ephemeral') }} +select * from {{ ref('view_model') }} +""" + +changed_ephemeral_model_sql = """ +{{ config(materialized='ephemeral') }} +select * from no.such.table +""" + +schema_yml = """ +version: 2 +models: + - name: view_model + columns: + - name: id + data_tests: + - unique: + severity: error + - not_null + - name: name +""" + +no_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: {} + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: True + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +modified_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: True + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: user_name + data_type: text +""" + +disabled_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: False + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +versioned_no_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: {} + versions: + - v: 1 + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +versioned_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: True + versions: + - v: 1 + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +versioned_modified_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: True + versions: + - v: 1 + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: user_name + data_type: text +""" + +versioned_disabled_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: False + versions: + - v: 1 + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +constraint_schema_yml = """ +version: 2 +models: + - name: view_model + columns: + - name: id + data_tests: + - unique: + severity: error + - not_null + - name: name + - name: table_model + config: + contract: + enforced: True + constraints: + - type: primary_key + columns: [id] + columns: + - name: id + constraints: + - type: not_null + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +modified_column_constraint_schema_yml = """ +version: 2 +models: + - name: view_model + columns: + - name: id + data_tests: + - unique: + severity: error + - not_null + - name: name + - name: table_model + config: + contract: + enforced: True + constraints: + - type: primary_key + columns: [id] + columns: + - name: id + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +modified_model_constraint_schema_yml = """ +version: 2 +models: + - name: view_model + columns: + - name: id + data_tests: + - unique: + severity: error + - not_null + - name: name + - name: table_model + config: + contract: + enforced: True + columns: + - name: id + constraints: + - type: not_null + data_type: integer + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +exposures_yml = """ +version: 2 +exposures: + - name: my_exposure + type: application + depends_on: + - ref('view_model') + owner: + email: test@example.com +""" + +macros_sql = """ +{% macro my_macro() %} + {% do log('in a macro' ) %} +{% endmacro %} +""" + +infinite_macros_sql = """ +{# trigger infinite recursion if not handled #} + +{% macro my_infinitely_recursive_macro() %} + {{ return(adapter.dispatch('my_infinitely_recursive_macro')()) }} +{% endmacro %} + +{% macro default__my_infinitely_recursive_macro() %} + {% if unmet_condition %} + {{ my_infinitely_recursive_macro() }} + {% else %} + {{ return('') }} + {% endif %} +{% endmacro %} +""" + +snapshot_sql = """ +{% snapshot my_cool_snapshot %} + + {{ + config( + target_database=database, + target_schema=schema, + unique_key='id', + strategy='check', + check_cols=['id'], + ) + }} + select * from {{ ref('view_model') }} + +{% endsnapshot %} +""" + +model_1_sql = """ +select * from {{ ref('seed') }} +""" + +modified_model_1_sql = """ +select * from {{ ref('seed') }} +order by 1 +""" + +model_2_sql = """ +select id from {{ ref('model_1') }} +""" + +modified_model_2_sql = """ +select * from {{ ref('model_1') }} +order by 1 +""" + + +group_schema_yml = """ +groups: + - name: finance + owner: + email: finance@jaffleshop.com + +models: + - name: model_1 + config: + group: finance + - name: model_2 + config: + group: finance +""" + + +group_modified_schema_yml = """ +groups: + - name: accounting + owner: + email: finance@jaffleshop.com +models: + - name: model_1 + config: + group: accounting + - name: model_2 + config: + group: accounting +""" + +group_modified_fail_schema_yml = """ +groups: + - name: finance + owner: + email: finance@jaffleshop.com +models: + - name: model_1 + config: + group: accounting + - name: model_2 + config: + group: finance +""" diff --git a/tests/functional/defer_state/test_defer_state.py b/tests/functional/defer_state/test_defer_state.py new file mode 100644 index 000000000..814c2d729 --- /dev/null +++ b/tests/functional/defer_state/test_defer_state.py @@ -0,0 +1,329 @@ +from copy import deepcopy +import json +import os +import shutil + +from dbt.contracts.results import RunStatus +from dbt.exceptions import DbtRuntimeError +from dbt.tests.util import rm_file, run_dbt, write_file +import pytest + +import fixtures + + +class BaseDeferState: + @pytest.fixture(scope="class") + def models(self): + return { + "table_model.sql": fixtures.table_model_sql, + "view_model.sql": fixtures.view_model_sql, + "ephemeral_model.sql": fixtures.ephemeral_model_sql, + "schema.yml": fixtures.schema_yml, + "exposures.yml": fixtures.exposures_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": fixtures.macros_sql, + "infinite_macros.sql": fixtures.infinite_macros_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": fixtures.seed_csv, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return { + "snapshot.sql": fixtures.snapshot_sql, + } + + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @property + def project_config_update(self): + return { + "seeds": { + "test": { + "quote_columns": False, + } + } + } + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + outputs["otherschema"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def copy_state(self, project_root): + state_path = os.path.join(project_root, "state") + if not os.path.exists(state_path): + os.makedirs(state_path) + shutil.copyfile( + f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" + ) + + def run_and_save_state(self, project_root, with_snapshot=False): + results = run_dbt(["seed"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + results = run_dbt(["run"]) + assert len(results) == 2 + assert not any(r.node.deferred for r in results) + results = run_dbt(["test"]) + assert len(results) == 2 + + if with_snapshot: + results = run_dbt(["snapshot"]) + assert len(results) == 1 + assert not any(r.node.deferred for r in results) + + # copy files + self.copy_state(project_root) + + +class TestDeferStateUnsupportedCommands(BaseDeferState): + def test_no_state(self, project): + # no "state" files present, snapshot fails + with pytest.raises(DbtRuntimeError): + run_dbt(["snapshot", "--state", "state", "--defer"]) + + +class TestRunCompileState(BaseDeferState): + def test_run_and_compile_defer(self, project): + self.run_and_save_state(project.project_root) + + # defer test, it succeeds + # Change directory to ensure that state directory is underneath + # project directory. + os.chdir(project.profiles_dir) + results = run_dbt(["compile", "--state", "state", "--defer"]) + assert len(results.results) == 6 + assert results.results[0].node.name == "seed" + + +class TestSnapshotState(BaseDeferState): + def test_snapshot_state_defer(self, project): + self.run_and_save_state(project.project_root) + # snapshot succeeds without --defer + run_dbt(["snapshot"]) + # copy files + self.copy_state(project.project_root) + # defer test, it succeeds + run_dbt(["snapshot", "--state", "state", "--defer"]) + # favor_state test, it succeeds + run_dbt(["snapshot", "--state", "state", "--defer", "--favor-state"]) + + +class TestRunDeferState(BaseDeferState): + def test_run_and_defer(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root) + + # test tests first, because run will change things + # no state, wrong schema, failure. + run_dbt(["test", "--target", "otherschema"], expect_pass=False) + + # test generate docs + # no state, wrong schema, empty nodes + catalog = run_dbt(["docs", "generate", "--target", "otherschema"]) + assert not catalog.nodes + + # no state, run also fails + run_dbt(["run", "--target", "otherschema"], expect_pass=False) + + # defer test, it succeeds + results = run_dbt( + ["test", "-m", "view_model+", "--state", "state", "--defer", "--target", "otherschema"] + ) + + # defer docs generate with state, catalog refers schema from the happy times + catalog = run_dbt( + [ + "docs", + "generate", + "-m", + "view_model+", + "--state", + "state", + "--defer", + "--target", + "otherschema", + ] + ) + assert "seed.test.seed" not in catalog.nodes + + # with state it should work though + results = run_dbt( + ["run", "-m", "view_model", "--state", "state", "--defer", "--target", "otherschema"] + ) + assert other_schema not in results[0].node.compiled_code + assert unique_schema in results[0].node.compiled_code + + with open("target/manifest.json") as fp: + data = json.load(fp) + assert data["nodes"]["seed.test.seed"]["deferred"] + + assert len(results) == 1 + + +class TestRunDeferStateChangedModel(BaseDeferState): + def test_run_defer_state_changed_model(self, project): + self.run_and_save_state(project.project_root) + + # change "view_model" + write_file(fixtures.changed_view_model_sql, "models", "view_model.sql") + + # the sql here is just wrong, so it should fail + run_dbt( + ["run", "-m", "view_model", "--state", "state", "--defer", "--target", "otherschema"], + expect_pass=False, + ) + # but this should work since we just use the old happy model + run_dbt( + ["run", "-m", "table_model", "--state", "state", "--defer", "--target", "otherschema"], + expect_pass=True, + ) + + # change "ephemeral_model" + write_file(fixtures.changed_ephemeral_model_sql, "models", "ephemeral_model.sql") + # this should fail because the table model refs a broken ephemeral + # model, which it should see + run_dbt( + ["run", "-m", "table_model", "--state", "state", "--defer", "--target", "otherschema"], + expect_pass=False, + ) + + +class TestRunDeferStateIFFNotExists(BaseDeferState): + def test_run_defer_iff_not_exists(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root) + + results = run_dbt(["seed", "--target", "otherschema"]) + assert len(results) == 1 + results = run_dbt(["run", "--state", "state", "--defer", "--target", "otherschema"]) + assert len(results) == 2 + + # because the seed now exists in our "other" schema, we should prefer it over the one + # available from state + assert other_schema in results[0].node.compiled_code + + # this time with --favor-state: even though the seed now exists in our "other" schema, + # we should still favor the one available from state + results = run_dbt( + ["run", "--state", "state", "--defer", "--favor-state", "--target", "otherschema"] + ) + assert len(results) == 2 + assert other_schema not in results[0].node.compiled_code + + +class TestDeferStateDeletedUpstream(BaseDeferState): + def test_run_defer_deleted_upstream(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root) + + # remove "ephemeral_model" + change "table_model" + rm_file("models", "ephemeral_model.sql") + write_file(fixtures.changed_table_model_sql, "models", "table_model.sql") + + # ephemeral_model is now gone. previously this caused a + # keyerror (dbt#2875), now it should pass + run_dbt( + ["run", "-m", "view_model", "--state", "state", "--defer", "--target", "otherschema"], + expect_pass=True, + ) + + # despite deferral, we should use models just created in our schema + results = run_dbt(["test", "--state", "state", "--defer", "--target", "otherschema"]) + assert other_schema in results[0].node.compiled_code + + # this time with --favor-state: prefer the models in the "other" schema, even though they exist in ours + run_dbt( + [ + "run", + "-m", + "view_model", + "--state", + "state", + "--defer", + "--favor-state", + "--target", + "otherschema", + ], + expect_pass=True, + ) + results = run_dbt(["test", "--state", "state", "--defer", "--favor-state"]) + assert other_schema not in results[0].node.compiled_code + + +class TestDeferStateFlag(BaseDeferState): + def test_defer_state_flag(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + + # test that state deferral works correctly + run_dbt(["compile", "--target-path", "target_compile"]) + write_file(fixtures.view_model_now_table_sql, "models", "table_model.sql") + + results = run_dbt(["ls", "--select", "state:modified", "--state", "target_compile"]) + assert results == ["test.table_model"] + + run_dbt(["seed", "--target", "otherschema", "--target-path", "target_otherschema"]) + + # this will fail because we haven't loaded the seed in the default schema + run_dbt( + [ + "run", + "--select", + "state:modified", + "--defer", + "--state", + "target_compile", + "--favor-state", + ], + expect_pass=False, + ) + + # this will fail because we haven't passed in --state + with pytest.raises( + DbtRuntimeError, match="Got a state selector method, but no comparison manifest" + ): + run_dbt( + [ + "run", + "--select", + "state:modified", + "--defer", + "--defer-state", + "target_otherschema", + "--favor-state", + ], + expect_pass=False, + ) + + # this will succeed because we've loaded the seed in other schema and are successfully deferring to it instead + results = run_dbt( + [ + "run", + "--select", + "state:modified", + "--defer", + "--state", + "target_compile", + "--defer-state", + "target_otherschema", + "--favor-state", + ] + ) + + assert len(results.results) == 1 + assert results.results[0].status == RunStatus.Success + assert results.results[0].node.name == "table_model" + assert results.results[0].adapter_response["rows_affected"] == 2 diff --git a/tests/functional/defer_state/test_group_updates.py b/tests/functional/defer_state/test_group_updates.py new file mode 100644 index 000000000..78c636842 --- /dev/null +++ b/tests/functional/defer_state/test_group_updates.py @@ -0,0 +1,108 @@ +import os + +from dbt.exceptions import ParsingError +from dbt.tests.util import copy_file, run_dbt, write_file +import pytest + +import fixtures + + +class GroupSetup: + @pytest.fixture(scope="class") + def models(self): + return { + "model_1.sql": fixtures.model_1_sql, + "model_2.sql": fixtures.model_2_sql, + "schema.yml": fixtures.group_schema_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": fixtures.seed_csv} + + def group_setup(self): + # save initial state + run_dbt(["seed"]) + results = run_dbt(["compile"]) + + # add sanity checks for first result + assert len(results) == 3 + seed_result = results[0].node + assert seed_result.unique_id == "seed.test.seed" + model_1_result = results[1].node + assert model_1_result.unique_id == "model.test.model_1" + assert model_1_result.group == "finance" + model_2_result = results[2].node + assert model_2_result.unique_id == "model.test.model_2" + assert model_2_result.group == "finance" + + +class TestFullyModifiedGroups(GroupSetup): + def test_changed_groups(self, project): + self.group_setup() + + # copy manifest.json to "state" directory + os.makedirs("state") + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # update group name, modify model so it gets picked up + write_file(fixtures.modified_model_1_sql, "models", "model_1.sql") + write_file(fixtures.modified_model_2_sql, "models", "model_2.sql") + write_file(fixtures.group_modified_schema_yml, "models", "schema.yml") + + # this test is flaky if you don't clean first before the build + run_dbt(["clean"]) + # only thing in results should be model_1 + results = run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) + + assert len(results) == 2 + model_1_result = results[0].node + assert model_1_result.unique_id == "model.test.model_1" + assert model_1_result.group == "accounting" # new group name! + model_2_result = results[1].node + assert model_2_result.unique_id == "model.test.model_2" + assert model_2_result.group == "accounting" # new group name! + + +class TestPartiallyModifiedGroups(GroupSetup): + def test_changed_groups(self, project): + self.group_setup() + + # copy manifest.json to "state" directory + os.makedirs("state") + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # update group name, modify model so it gets picked up + write_file(fixtures.modified_model_1_sql, "models", "model_1.sql") + write_file(fixtures.group_modified_schema_yml, "models", "schema.yml") + + # this test is flaky if you don't clean first before the build + run_dbt(["clean"]) + # only thing in results should be model_1 + results = run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) + + assert len(results) == 1 + model_1_result = results[0].node + assert model_1_result.unique_id == "model.test.model_1" + assert model_1_result.group == "accounting" # new group name! + + +class TestBadGroups(GroupSetup): + def test_changed_groups(self, project): + self.group_setup() + + # copy manifest.json to "state" directory + os.makedirs("state") + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # update group with invalid name, modify model so it gets picked up + write_file(fixtures.modified_model_1_sql, "models", "model_1.sql") + write_file(fixtures.group_modified_fail_schema_yml, "models", "schema.yml") + + # this test is flaky if you don't clean first before the build + run_dbt(["clean"]) + with pytest.raises(ParsingError, match="Invalid group 'accounting'"): + run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) diff --git a/tests/functional/defer_state/test_modified_state.py b/tests/functional/defer_state/test_modified_state.py new file mode 100644 index 000000000..fc5de2f36 --- /dev/null +++ b/tests/functional/defer_state/test_modified_state.py @@ -0,0 +1,969 @@ +import os +import random +import shutil +import string + +from dbt.exceptions import ContractBreakingChangeError +from dbt.tests.util import ( + get_manifest, + run_dbt, + run_dbt_and_capture, + update_config_file, + write_file, +) +from dbt_common.exceptions import CompilationError +import pytest + +import fixtures + + +class BaseModifiedState: + @pytest.fixture(scope="class") + def models(self): + return { + "table_model.sql": fixtures.table_model_sql, + "view_model.sql": fixtures.view_model_sql, + "ephemeral_model.sql": fixtures.ephemeral_model_sql, + "schema.yml": fixtures.schema_yml, + "exposures.yml": fixtures.exposures_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": fixtures.macros_sql, + "infinite_macros.sql": fixtures.infinite_macros_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": fixtures.seed_csv} + + @property + def project_config_update(self): + return { + "seeds": { + "test": { + "quote_columns": False, + } + } + } + + def copy_state(self): + if not os.path.exists("state"): + os.makedirs("state") + shutil.copyfile("target/manifest.json", "state/manifest.json") + + def run_and_save_state(self): + run_dbt(["seed"]) + run_dbt(["run"]) + self.copy_state() + + +class TestChangedSeedContents(BaseModifiedState): + def test_changed_seed_contents_state(self, project): + self.run_and_save_state() + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--exclude", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--select", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 1 + + # add a new row to the seed + changed_seed_contents = fixtures.seed_csv + "\n" + "3,carl" + write_file(changed_seed_contents, "seeds", "seed.csv") + + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"] + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--exclude", + "state:unmodified", + "--state", + "./state", + ] + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:unmodified", "--state", "./state"] + ) + assert len(results) == 0 + + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt(["ls", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt(["ls", "--select", "state:unmodified", "--state", "./state"]) + assert len(results) == 6 + + results = run_dbt(["ls", "--select", "state:modified+", "--state", "./state"]) + assert len(results) == 7 + assert set(results) == { + "test.seed", + "test.table_model", + "test.view_model", + "test.ephemeral_model", + "test.not_null_view_model_id", + "test.unique_view_model_id", + "exposure:test.my_exposure", + } + + results = run_dbt(["ls", "--select", "state:unmodified+", "--state", "./state"]) + assert len(results) == 6 + assert set(results) == { + "test.table_model", + "test.view_model", + "test.ephemeral_model", + "test.not_null_view_model_id", + "test.unique_view_model_id", + "exposure:test.my_exposure", + } + + shutil.rmtree("./state") + self.copy_state() + + # make a very big seed + # assume each line is ~2 bytes + len(name) + target_size = 1 * 1024 * 1024 + line_size = 64 + num_lines = target_size // line_size + maxlines = num_lines + 4 + seed_lines = [fixtures.seed_csv] + for idx in range(4, maxlines): + value = "".join(random.choices(string.ascii_letters, k=62)) + seed_lines.append(f"{idx},{value}") + seed_contents = "\n".join(seed_lines) + write_file(seed_contents, "seeds", "seed.csv") + + # now if we run again, we should get a warning + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"] + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + with pytest.raises(CompilationError) as exc: + run_dbt( + [ + "--warn-error", + "ls", + "--resource-type", + "seed", + "--select", + "state:modified", + "--state", + "./state", + ] + ) + assert ">1MB" in str(exc.value) + + # now check if unmodified returns none + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:unmodified", "--state", "./state"] + ) + assert len(results) == 0 + + shutil.rmtree("./state") + self.copy_state() + + # once it"s in path mode, we don"t mark it as modified if it changes + write_file(seed_contents + "\n1,test", "seeds", "seed.csv") + + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--exclude", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--select", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 1 + + +class TestChangedSeedConfig(BaseModifiedState): + def test_changed_seed_config(self, project): + self.run_and_save_state() + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--exclude", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--select", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 1 + + update_config_file({"seeds": {"test": {"quote_columns": False}}}, "dbt_project.yml") + + # quoting change -> seed changed + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"] + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt( + [ + "ls", + "--resource-type", + "seed", + "--exclude", + "state:unmodified", + "--state", + "./state", + ] + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "state:unmodified", "--state", "./state"] + ) + assert len(results) == 0 + + +class TestUnrenderedConfigSame(BaseModifiedState): + def test_unrendered_config_same(self, project): + self.run_and_save_state() + results = run_dbt( + ["ls", "--resource-type", "model", "--select", "state:modified", "--state", "./state"], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "model", + "--exclude", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 0 + + results = run_dbt( + [ + "ls", + "--resource-type", + "model", + "--select", + "state:unmodified", + "--state", + "./state", + ], + expect_pass=True, + ) + assert len(results) == 3 + + # although this is the default value, dbt will recognize it as a change + # for previously-unconfigured models, because it"s been explicitly set + update_config_file({"models": {"test": {"materialized": "view"}}}, "dbt_project.yml") + results = run_dbt( + ["ls", "--resource-type", "model", "--select", "state:modified", "--state", "./state"] + ) + assert len(results) == 1 + assert results[0] == "test.view_model" + + # converse of above statement + results = run_dbt( + [ + "ls", + "--resource-type", + "model", + "--exclude", + "state:unmodified", + "--state", + "./state", + ] + ) + assert len(results) == 1 + assert results[0] == "test.view_model" + + results = run_dbt( + [ + "ls", + "--resource-type", + "model", + "--select", + "state:unmodified", + "--state", + "./state", + ] + ) + assert len(results) == 2 + assert set(results) == { + "test.table_model", + "test.ephemeral_model", + } + + +class TestChangedModelContents(BaseModifiedState): + def test_changed_model_contents(self, project): + self.run_and_save_state() + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + table_model_update = """ + {{ config(materialized="table") }} + + select * from {{ ref("seed") }} + """ + + write_file(table_model_update, "models", "table_model.sql") + + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + +class TestNewMacro(BaseModifiedState): + def test_new_macro(self, project): + self.run_and_save_state() + + new_macro = """ + {% macro my_other_macro() %} + {% endmacro %} + """ + + # add a new macro to a new file + write_file(new_macro, "macros", "second_macro.sql") + + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + os.remove("macros/second_macro.sql") + # add a new macro to the existing file + with open("macros/macros.sql", "a") as fp: + fp.write(new_macro) + + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 0 + + +class TestChangedMacroContents(BaseModifiedState): + def test_changed_macro_contents(self, project): + self.run_and_save_state() + + # modify an existing macro + updated_macro = """ + {% macro my_macro() %} + {% do log("in a macro", info=True) %} + {% endmacro %} + """ + write_file(updated_macro, "macros", "macros.sql") + + # table_model calls this macro + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + + +class TestChangedExposure(BaseModifiedState): + def test_changed_exposure(self, project): + self.run_and_save_state() + + # add an "owner.name" to existing exposure + updated_exposure = fixtures.exposures_yml + "\n name: John Doe\n" + write_file(updated_exposure, "models", "exposures.yml") + + results = run_dbt(["run", "--models", "+state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "view_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 0 + + +class TestChangedContractUnversioned(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model" + CONTRACT_SCHEMA_YML = fixtures.contract_schema_yml + MODIFIED_SCHEMA_YML = fixtures.modified_contract_schema_yml + DISABLED_SCHEMA_YML = fixtures.disabled_contract_schema_yml + NO_CONTRACT_SCHEMA_YML = fixtures.no_contract_schema_yml + + def test_changed_contract(self, project): + self.run_and_save_state() + + # update contract for table_model + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + + # This will find the table_model node modified both through a config change + # and by a non-breaking change to contract: true + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + manifest = get_manifest(project.project_root) + model_unique_id = self.MODEL_UNIQUE_ID + model = manifest.nodes[model_unique_id] + expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} + assert model.unrendered_config == expected_unrendered_config + + # Run it again with "state:modified:contract", still finds modified due to contract: true + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + first_contract_checksum = model.contract.checksum + assert first_contract_checksum + # save a new state + self.copy_state() + + # This should raise because a column name has changed + write_file(self.MODIFIED_SCHEMA_YML, "models", "schema.yml") + results = run_dbt(["run"], expect_pass=False) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # double check different contract_checksums + assert first_contract_checksum != second_contract_checksum + + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"], expect_pass=False + ) + expected_error = "This model has an enforced contract that failed." + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Please ensure the name, data_type, and number of columns in your contract match the columns in your model's definition" + assert expected_error in logs + assert expected_warning in logs + assert expected_change in logs + + # Go back to schema file without contract. Should throw a warning. + write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Contract enforcement was removed" + + # Now disable the contract. Should throw a warning - force warning into an error. + write_file(self.DISABLED_SCHEMA_YML, "models", "schema.yml") + with pytest.raises(CompilationError): + _, logs = run_dbt_and_capture( + [ + "--warn-error", + "run", + "--models", + "state:modified.contract", + "--state", + "./state", + ] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Contract enforcement was removed" + + +class TestChangedContractVersioned(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model.v1" + CONTRACT_SCHEMA_YML = fixtures.versioned_contract_schema_yml + MODIFIED_SCHEMA_YML = fixtures.versioned_modified_contract_schema_yml + DISABLED_SCHEMA_YML = fixtures.versioned_disabled_contract_schema_yml + NO_CONTRACT_SCHEMA_YML = fixtures.versioned_no_contract_schema_yml + + def test_changed_contract_versioned(self, project): + self.run_and_save_state() + + # update contract for table_model + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + + # This will find the table_model node modified both through a config change + # and by a non-breaking change to contract: true + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + manifest = get_manifest(project.project_root) + model_unique_id = self.MODEL_UNIQUE_ID + model = manifest.nodes[model_unique_id] + expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} + assert model.unrendered_config == expected_unrendered_config + + # Run it again with "state:modified:contract", still finds modified due to contract: true + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + first_contract_checksum = model.contract.checksum + assert first_contract_checksum + # save a new state + self.copy_state() + + # This should raise because a column name has changed + write_file(self.MODIFIED_SCHEMA_YML, "models", "schema.yml") + results = run_dbt(["run"], expect_pass=False) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # double check different contract_checksums + assert first_contract_checksum != second_contract_checksum + with pytest.raises(ContractBreakingChangeError): + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + + # Go back to schema file without contract. Should raise an error. + write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") + with pytest.raises(ContractBreakingChangeError): + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + + # Now disable the contract. Should raise an error. + write_file(self.DISABLED_SCHEMA_YML, "models", "schema.yml") + with pytest.raises(ContractBreakingChangeError): + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + + +class TestChangedConstraintUnversioned(BaseModifiedState): + def test_changed_constraint(self, project): + self.run_and_save_state() + + # update constraint for table_model + write_file(fixtures.constraint_schema_yml, "models", "schema.yml") + + # This will find the table_model node modified both through adding constraint + # and by a non-breaking change to contract: true + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + manifest = get_manifest(project.project_root) + model_unique_id = "model.test.table_model" + model = manifest.nodes[model_unique_id] + expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} + assert model.unrendered_config == expected_unrendered_config + + # Run it again with "state:modified:contract", still finds modified due to contract: true + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + first_contract_checksum = model.contract.checksum + assert first_contract_checksum + # save a new state + self.copy_state() + + # This should raise because a column level constraint was removed + write_file(fixtures.modified_column_constraint_schema_yml, "models", "schema.yml") + # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # double check different contract_checksums + assert first_contract_checksum != second_contract_checksum + # since the models are unversioned, they raise a warning but not an error + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Enforced column level constraints were removed" + assert expected_warning in logs + assert expected_change in logs + + # This should raise because a model level constraint was removed (primary_key on id) + write_file(fixtures.modified_model_constraint_schema_yml, "models", "schema.yml") + # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # double check different contract_checksums + assert first_contract_checksum != second_contract_checksum + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Enforced model level constraints were removed" + assert expected_warning in logs + assert expected_change in logs + + +class TestChangedMaterializationConstraint(BaseModifiedState): + def test_changed_materialization(self, project): + self.run_and_save_state() + + # update constraint for table_model + write_file(fixtures.constraint_schema_yml, "models", "schema.yml") + + # This will find the table_model node modified both through adding constraint + # and by a non-breaking change to contract: true + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + manifest = get_manifest(project.project_root) + model_unique_id = "model.test.table_model" + model = manifest.nodes[model_unique_id] + expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} + assert model.unrendered_config == expected_unrendered_config + + # Run it again with "state:modified:contract", still finds modified due to contract: true + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + first_contract_checksum = model.contract.checksum + assert first_contract_checksum + # save a new state + self.copy_state() + + # This should raise because materialization changed from table to view + write_file(fixtures.table_model_now_view_sql, "models", "table_model.sql") + # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # double check different contract_checksums + assert first_contract_checksum != second_contract_checksum + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Materialization changed with enforced constraints" + assert expected_warning in logs + assert expected_change in logs + + # This should not raise because materialization changed from table to incremental, both enforce constraints + write_file(fixtures.table_model_now_incremental_sql, "models", "table_model.sql") + # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds + results = run_dbt(["run"]) + assert len(results) == 2 + + # This should pass because materialization changed from view to table which is the same as just adding new constraint, not breaking + write_file(fixtures.view_model_now_table_sql, "models", "view_model.sql") + write_file(fixtures.table_model_sql, "models", "table_model.sql") + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # contract_checksums should be equal because we only save constraint related changes if the materialization is table/incremental + assert first_contract_checksum == second_contract_checksum + run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + assert len(results) == 2 + + +my_model_sql = """ +select 1 as id +""" + +modified_my_model_sql = """ +-- a comment +select 1 as id +""" + +modified_my_model_non_breaking_sql = """ +-- a comment +select 1 as id, 'blue' as color +""" + +my_model_yml = """ +models: + - name: my_model + latest_version: 1 + config: + contract: + enforced: true + columns: + - name: id + data_type: int + versions: + - v: 1 +""" + +modified_my_model_yml = """ +models: + - name: my_model + latest_version: 1 + config: + contract: + enforced: true + columns: + - name: id + data_type: text + versions: + - v: 1 +""" + +modified_my_model_non_breaking_yml = """ +models: + - name: my_model + latest_version: 1 + config: + contract: + enforced: true + columns: + - name: id + data_type: int + - name: color + data_type: text + versions: + - v: 1 +""" + + +class TestModifiedBodyAndContract: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model.yml": my_model_yml, + } + + def copy_state(self): + if not os.path.exists("state"): + os.makedirs("state") + shutil.copyfile("target/manifest.json", "state/manifest.json") + + def test_modified_body_and_contract(self, project): + results = run_dbt(["run"]) + assert len(results) == 1 + self.copy_state() + + # Change both body and contract in a *breaking* way (= changing data_type of existing column) + write_file(modified_my_model_yml, "models", "my_model.yml") + write_file(modified_my_model_sql, "models", "my_model.sql") + + # Should raise even without specifying state:modified.contract + with pytest.raises(ContractBreakingChangeError): + results = run_dbt(["run", "-s", "state:modified", "--state", "./state"]) + + with pytest.raises(ContractBreakingChangeError): + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + + # Change both body and contract in a *non-breaking* way (= adding a new column) + write_file(modified_my_model_non_breaking_yml, "models", "my_model.yml") + write_file(modified_my_model_non_breaking_sql, "models", "my_model.sql") + + # Should pass + run_dbt(["run", "-s", "state:modified", "--state", "./state"]) + + # The model's contract has changed, even if non-breaking, so it should be selected by 'state:modified.contract' + results = run_dbt(["list", "-s", "state:modified.contract", "--state", "./state"]) + assert results == ["test.my_model.v1"] + + +modified_table_model_access_yml = """ +version: 2 +models: + - name: table_model + access: public +""" + + +class TestModifiedAccess(BaseModifiedState): + def test_changed_access(self, project): + self.run_and_save_state() + + # No access change + assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + # Modify access (protected -> public) + write_file(modified_table_model_access_yml, "models", "schema.yml") + assert run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model"] + + +modified_table_model_access_yml = """ +version: 2 +models: + - name: table_model + deprecation_date: 2020-01-01 +""" + + +class TestModifiedDeprecationDate(BaseModifiedState): + def test_changed_access(self, project): + self.run_and_save_state() + + # No access change + assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + # Modify deprecation_date (None -> 2020-01-01) + write_file(modified_table_model_access_yml, "models", "schema.yml") + assert run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model"] + + +modified_table_model_version_yml = """ +version: 2 +models: + - name: table_model + versions: + - v: 1 + defined_in: table_model +""" + + +class TestModifiedVersion(BaseModifiedState): + def test_changed_access(self, project): + self.run_and_save_state() + + # Change version (null -> v1) + write_file(modified_table_model_version_yml, "models", "schema.yml") + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model.v1"] + + +table_model_latest_version_yml = """ +version: 2 +models: + - name: table_model + latest_version: 1 + versions: + - v: 1 + defined_in: table_model +""" + + +modified_table_model_latest_version_yml = """ +version: 2 +models: + - name: table_model + latest_version: 2 + versions: + - v: 1 + defined_in: table_model + - v: 2 +""" + + +class TestModifiedLatestVersion(BaseModifiedState): + def test_changed_access(self, project): + # Setup initial latest_version: 1 + write_file(table_model_latest_version_yml, "models", "schema.yml") + + self.run_and_save_state() + + # Bump latest version + write_file(fixtures.table_model_sql, "models", "table_model_v2.sql") + write_file(modified_table_model_latest_version_yml, "models", "schema.yml") + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model.v1", "test.table_model.v2"] diff --git a/tests/functional/defer_state/test_run_results_state.py b/tests/functional/defer_state/test_run_results_state.py new file mode 100644 index 000000000..795cdb974 --- /dev/null +++ b/tests/functional/defer_state/test_run_results_state.py @@ -0,0 +1,481 @@ +import os +import shutil + +from dbt.tests.util import run_dbt, write_file +import pytest + +import fixtures + + +class BaseRunResultsState: + @pytest.fixture(scope="class") + def models(self): + return { + "table_model.sql": fixtures.table_model_sql, + "view_model.sql": fixtures.view_model_sql, + "ephemeral_model.sql": fixtures.ephemeral_model_sql, + "schema.yml": fixtures.schema_yml, + "exposures.yml": fixtures.exposures_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": fixtures.macros_sql, + "infinite_macros.sql": fixtures.infinite_macros_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": fixtures.seed_csv} + + @property + def project_config_update(self): + return { + "seeds": { + "test": { + "quote_columns": False, + } + } + } + + def clear_state(self): + shutil.rmtree("./state") + + def copy_state(self): + if not os.path.exists("state"): + os.makedirs("state") + shutil.copyfile("target/manifest.json", "state/manifest.json") + shutil.copyfile("target/run_results.json", "state/run_results.json") + + def run_and_save_state(self): + run_dbt(["build"]) + self.copy_state() + + def rebuild_run_dbt(self, expect_pass=True): + self.clear_state() + run_dbt(["build"], expect_pass=expect_pass) + self.copy_state() + + def update_view_model_bad_sql(self): + # update view model to generate a failure case + not_unique_sql = "select * from forced_error" + write_file(not_unique_sql, "models", "view_model.sql") + + def update_view_model_failing_tests(self, with_dupes=True, with_nulls=False): + # test failure on build tests + # fail the unique test + select_1 = "select 1 as id" + select_stmts = [select_1] + if with_dupes: + select_stmts.append(select_1) + if with_nulls: + select_stmts.append("select null as id") + failing_tests_sql = " union all ".join(select_stmts) + write_file(failing_tests_sql, "models", "view_model.sql") + + def update_unique_test_severity_warn(self): + # change the unique test severity from error to warn and reuse the same view_model.sql changes above + new_config = fixtures.schema_yml.replace("error", "warn") + write_file(new_config, "models", "schema.yml") + + +class TestSeedRunResultsState(BaseRunResultsState): + def test_seed_run_results_state(self, project): + self.run_and_save_state() + self.clear_state() + run_dbt(["seed"]) + self.copy_state() + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "result:success", "--state", "./state"], + expect_pass=True, + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt(["ls", "--select", "result:success", "--state", "./state"]) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt(["ls", "--select", "result:success+", "--state", "./state"]) + assert len(results) == 7 + assert set(results) == { + "test.seed", + "test.table_model", + "test.view_model", + "test.ephemeral_model", + "test.not_null_view_model_id", + "test.unique_view_model_id", + "exposure:test.my_exposure", + } + + # add a new faulty row to the seed + changed_seed_contents = fixtures.seed_csv + "\n" + "\\\3,carl" + write_file(changed_seed_contents, "seeds", "seed.csv") + + self.clear_state() + run_dbt(["seed"], expect_pass=False) + self.copy_state() + + results = run_dbt( + ["ls", "--resource-type", "seed", "--select", "result:error", "--state", "./state"], + expect_pass=True, + ) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt(["ls", "--select", "result:error", "--state", "./state"]) + assert len(results) == 1 + assert results[0] == "test.seed" + + results = run_dbt(["ls", "--select", "result:error+", "--state", "./state"]) + assert len(results) == 7 + assert set(results) == { + "test.seed", + "test.table_model", + "test.view_model", + "test.ephemeral_model", + "test.not_null_view_model_id", + "test.unique_view_model_id", + "exposure:test.my_exposure", + } + + +class TestBuildRunResultsState(BaseRunResultsState): + def test_build_run_results_state(self, project): + self.run_and_save_state() + results = run_dbt(["build", "--select", "result:error", "--state", "./state"]) + assert len(results) == 0 + + self.update_view_model_bad_sql() + self.rebuild_run_dbt(expect_pass=False) + + results = run_dbt( + ["build", "--select", "result:error", "--state", "./state"], expect_pass=False + ) + assert len(results) == 3 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"view_model", "not_null_view_model_id", "unique_view_model_id"} + + results = run_dbt(["ls", "--select", "result:error", "--state", "./state"]) + assert len(results) == 3 + assert set(results) == { + "test.view_model", + "test.not_null_view_model_id", + "test.unique_view_model_id", + } + + results = run_dbt( + ["build", "--select", "result:error+", "--state", "./state"], expect_pass=False + ) + assert len(results) == 4 + nodes = set([elem.node.name for elem in results]) + assert nodes == { + "table_model", + "view_model", + "not_null_view_model_id", + "unique_view_model_id", + } + + results = run_dbt(["ls", "--select", "result:error+", "--state", "./state"]) + assert len(results) == 6 # includes exposure + assert set(results) == { + "test.table_model", + "test.view_model", + "test.ephemeral_model", + "test.not_null_view_model_id", + "test.unique_view_model_id", + "exposure:test.my_exposure", + } + + self.update_view_model_failing_tests() + self.rebuild_run_dbt(expect_pass=False) + + results = run_dbt( + ["build", "--select", "result:fail", "--state", "./state"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].node.name == "unique_view_model_id" + + results = run_dbt(["ls", "--select", "result:fail", "--state", "./state"]) + assert len(results) == 1 + assert results[0] == "test.unique_view_model_id" + + results = run_dbt( + ["build", "--select", "result:fail+", "--state", "./state"], expect_pass=False + ) + assert len(results) == 1 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"unique_view_model_id"} + + results = run_dbt(["ls", "--select", "result:fail+", "--state", "./state"]) + assert len(results) == 1 + assert set(results) == {"test.unique_view_model_id"} + + self.update_unique_test_severity_warn() + self.rebuild_run_dbt(expect_pass=True) + + results = run_dbt( + ["build", "--select", "result:warn", "--state", "./state"], expect_pass=True + ) + assert len(results) == 1 + assert results[0].node.name == "unique_view_model_id" + + results = run_dbt(["ls", "--select", "result:warn", "--state", "./state"]) + assert len(results) == 1 + assert results[0] == "test.unique_view_model_id" + + results = run_dbt( + ["build", "--select", "result:warn+", "--state", "./state"], expect_pass=True + ) + assert len(results) == 1 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"unique_view_model_id"} + + results = run_dbt(["ls", "--select", "result:warn+", "--state", "./state"]) + assert len(results) == 1 + assert set(results) == {"test.unique_view_model_id"} + + +class TestRunRunResultsState(BaseRunResultsState): + def test_run_run_results_state(self, project): + self.run_and_save_state() + results = run_dbt( + ["run", "--select", "result:success", "--state", "./state"], expect_pass=True + ) + assert len(results) == 2 + assert results[0].node.name == "view_model" + assert results[1].node.name == "table_model" + + # clear state and rerun upstream view model to test + operator + self.clear_state() + run_dbt(["run", "--select", "view_model"], expect_pass=True) + self.copy_state() + results = run_dbt( + ["run", "--select", "result:success+", "--state", "./state"], expect_pass=True + ) + assert len(results) == 2 + assert results[0].node.name == "view_model" + assert results[1].node.name == "table_model" + + # check we are starting from a place with 0 errors + results = run_dbt(["run", "--select", "result:error", "--state", "./state"]) + assert len(results) == 0 + + self.update_view_model_bad_sql() + self.clear_state() + run_dbt(["run"], expect_pass=False) + self.copy_state() + + # test single result selector on error + results = run_dbt( + ["run", "--select", "result:error", "--state", "./state"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].node.name == "view_model" + + # test + operator selection on error + results = run_dbt( + ["run", "--select", "result:error+", "--state", "./state"], expect_pass=False + ) + assert len(results) == 2 + assert results[0].node.name == "view_model" + assert results[1].node.name == "table_model" + + # single result selector on skipped. Expect this to pass becase underlying view already defined above + results = run_dbt( + ["run", "--select", "result:skipped", "--state", "./state"], expect_pass=True + ) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + # add a downstream model that depends on table_model for skipped+ selector + downstream_model_sql = "select * from {{ref('table_model')}}" + write_file(downstream_model_sql, "models", "table_model_downstream.sql") + + self.clear_state() + run_dbt(["run"], expect_pass=False) + self.copy_state() + + results = run_dbt( + ["run", "--select", "result:skipped+", "--state", "./state"], expect_pass=True + ) + assert len(results) == 2 + assert results[0].node.name == "table_model" + assert results[1].node.name == "table_model_downstream" + + +class TestTestRunResultsState(BaseRunResultsState): + def test_test_run_results_state(self, project): + self.run_and_save_state() + # run passed nodes + results = run_dbt( + ["test", "--select", "result:pass", "--state", "./state"], expect_pass=True + ) + assert len(results) == 2 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"unique_view_model_id", "not_null_view_model_id"} + + # run passed nodes with + operator + results = run_dbt( + ["test", "--select", "result:pass+", "--state", "./state"], expect_pass=True + ) + assert len(results) == 2 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"unique_view_model_id", "not_null_view_model_id"} + + self.update_view_model_failing_tests() + self.rebuild_run_dbt(expect_pass=False) + + # test with failure selector + results = run_dbt( + ["test", "--select", "result:fail", "--state", "./state"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].node.name == "unique_view_model_id" + + # test with failure selector and + operator + results = run_dbt( + ["test", "--select", "result:fail+", "--state", "./state"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].node.name == "unique_view_model_id" + + self.update_unique_test_severity_warn() + # rebuild - expect_pass = True because we changed the error to a warning this time around + self.rebuild_run_dbt(expect_pass=True) + + # test with warn selector + results = run_dbt( + ["test", "--select", "result:warn", "--state", "./state"], expect_pass=True + ) + assert len(results) == 1 + assert results[0].node.name == "unique_view_model_id" + + # test with warn selector and + operator + results = run_dbt( + ["test", "--select", "result:warn+", "--state", "./state"], expect_pass=True + ) + assert len(results) == 1 + assert results[0].node.name == "unique_view_model_id" + + +class TestConcurrentSelectionRunResultsState(BaseRunResultsState): + def test_concurrent_selection_run_run_results_state(self, project): + self.run_and_save_state() + results = run_dbt( + ["run", "--select", "state:modified+", "result:error+", "--state", "./state"] + ) + assert len(results) == 0 + + self.update_view_model_bad_sql() + self.clear_state() + run_dbt(["run"], expect_pass=False) + self.copy_state() + + # add a new failing dbt model + bad_sql = "select * from forced_error" + write_file(bad_sql, "models", "table_model_modified_example.sql") + + results = run_dbt( + ["run", "--select", "state:modified+", "result:error+", "--state", "./state"], + expect_pass=False, + ) + assert len(results) == 3 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"view_model", "table_model_modified_example", "table_model"} + + +class TestConcurrentSelectionTestRunResultsState(BaseRunResultsState): + def test_concurrent_selection_test_run_results_state(self, project): + self.run_and_save_state() + # create failure test case for result:fail selector + self.update_view_model_failing_tests(with_nulls=True) + + # run dbt build again to trigger test errors + self.rebuild_run_dbt(expect_pass=False) + + # get the failures from + results = run_dbt( + [ + "test", + "--select", + "result:fail", + "--exclude", + "not_null_view_model_id", + "--state", + "./state", + ], + expect_pass=False, + ) + assert len(results) == 1 + nodes = set([elem.node.name for elem in results]) + assert nodes == {"unique_view_model_id"} + + +class TestConcurrentSelectionBuildRunResultsState(BaseRunResultsState): + def test_concurrent_selectors_build_run_results_state(self, project): + self.run_and_save_state() + results = run_dbt( + ["build", "--select", "state:modified+", "result:error+", "--state", "./state"] + ) + assert len(results) == 0 + + self.update_view_model_bad_sql() + self.rebuild_run_dbt(expect_pass=False) + + # add a new failing dbt model + bad_sql = "select * from forced_error" + write_file(bad_sql, "models", "table_model_modified_example.sql") + + results = run_dbt( + ["build", "--select", "state:modified+", "result:error+", "--state", "./state"], + expect_pass=False, + ) + assert len(results) == 5 + nodes = set([elem.node.name for elem in results]) + assert nodes == { + "table_model_modified_example", + "view_model", + "table_model", + "not_null_view_model_id", + "unique_view_model_id", + } + + self.update_view_model_failing_tests() + + # create error model case for result:error selector + more_bad_sql = "select 1 as id from not_exists" + write_file(more_bad_sql, "models", "error_model.sql") + + # create something downstream from the error model to rerun + downstream_model_sql = "select * from {{ ref('error_model') }} )" + write_file(downstream_model_sql, "models", "downstream_of_error_model.sql") + + # regenerate build state + self.rebuild_run_dbt(expect_pass=False) + + # modify model again to trigger the state:modified selector + bad_again_sql = "select * from forced_anothererror" + write_file(bad_again_sql, "models", "table_model_modified_example.sql") + + results = run_dbt( + [ + "build", + "--select", + "state:modified+", + "result:error+", + "result:fail+", + "--state", + "./state", + ], + expect_pass=False, + ) + assert len(results) == 4 + nodes = set([elem.node.name for elem in results]) + assert nodes == { + "error_model", + "downstream_of_error_model", + "table_model_modified_example", + "unique_view_model_id", + } diff --git a/tests/functional/dependencies/data/seed.sql b/tests/functional/dependencies/data/seed.sql new file mode 100644 index 000000000..b74c3b35e --- /dev/null +++ b/tests/functional/dependencies/data/seed.sql @@ -0,0 +1,586 @@ +create table {schema}.seed ( + id INTEGER, + first_name VARCHAR(11), + email VARCHAR(31), + ip_address VARCHAR(15), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +INSERT INTO {schema}.seed + ("id","first_name","email","ip_address","updated_at") +VALUES + (1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), + (2,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), + (3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), + (4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), + (5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), + (6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), + (7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), + (8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), + (9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), + (10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), + (11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), + (12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), + (13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), + (14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), + (15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), + (16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), + (17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), + (18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), + (19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), + (20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), + (21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), + (22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), + (23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), + (24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), + (25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), + (26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), + (27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), + (28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), + (29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), + (30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), + (31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), + (32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), + (33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), + (34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), + (35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), + (36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), + (37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), + (38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), + (39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), + (40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), + (41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), + (42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), + (43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), + (44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), + (45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), + (46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), + (47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), + (48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), + (49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), + (50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), + (51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), + (52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), + (53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), + (54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), + (55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), + (56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), + (57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), + (58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), + (59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), + (60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), + (61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), + (62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), + (63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), + (64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), + (65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), + (66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), + (67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), + (68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), + (69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), + (70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), + (71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), + (72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), + (73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), + (74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), + (75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), + (76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), + (77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), + (78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), + (79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), + (80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), + (81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), + (82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), + (83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), + (84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), + (85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), + (86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), + (87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), + (88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), + (89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), + (90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), + (91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), + (92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), + (93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), + (94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), + (95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), + (96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), + (97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), + (98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), + (99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), + (100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'), + (101,'Deborah','dknight2s@reverbnation.com','222.131.211.191','1970-07-08 08:54:23'), + (102,'Sandra','sblack2t@tripadvisor.com','254.183.128.254','2000-04-12 02:39:36'), + (103,'Edward','eburns2u@dailymotion.com','253.89.118.18','1993-10-10 10:54:01'), + (104,'Anthony','ayoung2v@ustream.tv','118.4.193.176','1978-08-26 17:07:29'), + (105,'Donald','dlawrence2w@wp.com','139.200.159.227','2007-07-21 20:56:20'), + (106,'Matthew','mfreeman2x@google.fr','205.26.239.92','2014-12-05 17:05:39'), + (107,'Sean','ssanders2y@trellian.com','143.89.82.108','1993-07-14 21:45:02'), + (108,'Sharon','srobinson2z@soundcloud.com','66.234.247.54','1977-04-06 19:07:03'), + (109,'Jennifer','jwatson30@t-online.de','196.102.127.7','1998-03-07 05:12:23'), + (110,'Clarence','cbrooks31@si.edu','218.93.234.73','2002-11-06 17:22:25'), + (111,'Jose','jflores32@goo.gl','185.105.244.231','1995-01-05 06:32:21'), + (112,'George','glee33@adobe.com','173.82.249.196','2015-01-04 02:47:46'), + (113,'Larry','lhill34@linkedin.com','66.5.206.195','2010-11-02 10:21:17'), + (114,'Marie','mmeyer35@mysql.com','151.152.88.107','1990-05-22 20:52:51'), + (115,'Clarence','cwebb36@skype.com','130.198.55.217','1972-10-27 07:38:54'), + (116,'Sarah','scarter37@answers.com','80.89.18.153','1971-08-24 19:29:30'), + (117,'Henry','hhughes38@webeden.co.uk','152.60.114.174','1973-01-27 09:00:42'), + (118,'Teresa','thenry39@hao123.com','32.187.239.106','2015-11-06 01:48:44'), + (119,'Billy','bgutierrez3a@sun.com','52.37.70.134','2002-03-19 03:20:19'), + (120,'Anthony','agibson3b@github.io','154.251.232.213','1991-04-19 01:08:15'), + (121,'Sandra','sromero3c@wikia.com','44.124.171.2','1998-09-06 20:30:34'), + (122,'Paula','pandrews3d@blogs.com','153.142.118.226','2003-06-24 16:31:24'), + (123,'Terry','tbaker3e@csmonitor.com','99.120.45.219','1970-12-09 23:57:21'), + (124,'Lois','lwilson3f@reuters.com','147.44.171.83','1971-01-09 22:28:51'), + (125,'Sara','smorgan3g@nature.com','197.67.192.230','1992-01-28 20:33:24'), + (126,'Charles','ctorres3h@china.com.cn','156.115.216.2','1993-10-02 19:36:34'), + (127,'Richard','ralexander3i@marriott.com','248.235.180.59','1999-02-03 18:40:55'), + (128,'Christina','charper3j@cocolog-nifty.com','152.114.116.129','1978-09-13 00:37:32'), + (129,'Steve','sadams3k@economist.com','112.248.91.98','2004-03-21 09:07:43'), + (130,'Katherine','krobertson3l@ow.ly','37.220.107.28','1977-03-18 19:28:50'), + (131,'Donna','dgibson3m@state.gov','222.218.76.221','1999-02-01 06:46:16'), + (132,'Christina','cwest3n@mlb.com','152.114.6.160','1979-12-24 15:30:35'), + (133,'Sandra','swillis3o@meetup.com','180.71.49.34','1984-09-27 08:05:54'), + (134,'Clarence','cedwards3p@smugmug.com','10.64.180.186','1979-04-16 16:52:10'), + (135,'Ruby','rjames3q@wp.com','98.61.54.20','2007-01-13 14:25:52'), + (136,'Sarah','smontgomery3r@tripod.com','91.45.164.172','2009-07-25 04:34:30'), + (137,'Sarah','soliver3s@eventbrite.com','30.106.39.146','2012-05-09 22:12:33'), + (138,'Deborah','dwheeler3t@biblegateway.com','59.105.213.173','1999-11-09 08:08:44'), + (139,'Deborah','dray3u@i2i.jp','11.108.186.217','2014-02-04 03:15:19'), + (140,'Paul','parmstrong3v@alexa.com','6.250.59.43','2009-12-21 10:08:53'), + (141,'Aaron','abishop3w@opera.com','207.145.249.62','1996-04-25 23:20:23'), + (142,'Henry','hsanders3x@google.ru','140.215.203.171','2012-01-29 11:52:32'), + (143,'Anne','aanderson3y@1688.com','74.150.102.118','1982-04-03 13:46:17'), + (144,'Victor','vmurphy3z@hugedomains.com','222.155.99.152','1987-11-03 19:58:41'), + (145,'Evelyn','ereid40@pbs.org','249.122.33.117','1977-12-14 17:09:57'), + (146,'Brian','bgonzalez41@wikia.com','246.254.235.141','1991-02-24 00:45:58'), + (147,'Sandra','sgray42@squarespace.com','150.73.28.159','1972-07-28 17:26:32'), + (148,'Alice','ajones43@a8.net','78.253.12.177','2002-12-05 16:57:46'), + (149,'Jessica','jhanson44@mapquest.com','87.229.30.160','1994-01-30 11:40:04'), + (150,'Louise','lbailey45@reuters.com','191.219.31.101','2011-09-07 21:11:45'), + (151,'Christopher','cgonzalez46@printfriendly.com','83.137.213.239','1984-10-24 14:58:04'), + (152,'Gregory','gcollins47@yandex.ru','28.176.10.115','1998-07-25 17:17:10'), + (153,'Jane','jperkins48@usnews.com','46.53.164.159','1979-08-19 15:25:00'), + (154,'Phyllis','plong49@yahoo.co.jp','208.140.88.2','1985-07-06 02:16:36'), + (155,'Adam','acarter4a@scribd.com','78.48.148.204','2005-07-20 03:31:09'), + (156,'Frank','fweaver4b@angelfire.com','199.180.255.224','2011-03-04 23:07:54'), + (157,'Ronald','rmurphy4c@cloudflare.com','73.42.97.231','1991-01-11 10:39:41'), + (158,'Richard','rmorris4d@e-recht24.de','91.9.97.223','2009-01-17 21:05:15'), + (159,'Rose','rfoster4e@woothemes.com','203.169.53.16','1991-04-21 02:09:38'), + (160,'George','ggarrett4f@uiuc.edu','186.61.5.167','1989-11-11 11:29:42'), + (161,'Victor','vhamilton4g@biblegateway.com','121.229.138.38','2012-06-22 18:01:23'), + (162,'Mark','mbennett4h@businessinsider.com','209.184.29.203','1980-04-16 15:26:34'), + (163,'Martin','mwells4i@ifeng.com','97.223.55.105','2010-05-26 14:08:18'), + (164,'Diana','dstone4j@google.ru','90.155.52.47','2013-02-11 00:14:54'), + (165,'Walter','wferguson4k@blogger.com','30.63.212.44','1986-02-20 17:46:46'), + (166,'Denise','dcoleman4l@vistaprint.com','10.209.153.77','1992-05-13 20:14:14'), + (167,'Philip','pknight4m@xing.com','15.28.135.167','2000-09-11 18:41:13'), + (168,'Russell','rcarr4n@youtube.com','113.55.165.50','2008-07-10 17:49:27'), + (169,'Donna','dburke4o@dion.ne.jp','70.0.105.111','1992-02-10 17:24:58'), + (170,'Anne','along4p@squidoo.com','36.154.58.107','2012-08-19 23:35:31'), + (171,'Clarence','cbanks4q@webeden.co.uk','94.57.53.114','1972-03-11 21:46:44'), + (172,'Betty','bbowman4r@cyberchimps.com','178.115.209.69','2013-01-13 21:34:51'), + (173,'Andrew','ahudson4s@nytimes.com','84.32.252.144','1998-09-15 14:20:04'), + (174,'Keith','kgordon4t@cam.ac.uk','189.237.211.102','2009-01-22 05:34:38'), + (175,'Patrick','pwheeler4u@mysql.com','47.22.117.226','1984-09-05 22:33:15'), + (176,'Jesse','jfoster4v@mapquest.com','229.95.131.46','1990-01-20 12:19:15'), + (177,'Arthur','afisher4w@jugem.jp','107.255.244.98','1983-10-13 11:08:46'), + (178,'Nicole','nryan4x@wsj.com','243.211.33.221','1974-05-30 23:19:14'), + (179,'Bruce','bjohnson4y@sfgate.com','17.41.200.101','1992-09-23 02:02:19'), + (180,'Terry','tcox4z@reference.com','20.189.120.106','1982-02-13 12:43:14'), + (181,'Ashley','astanley50@kickstarter.com','86.3.56.98','1976-05-09 01:27:16'), + (182,'Michael','mrivera51@about.me','72.118.249.0','1971-11-11 17:28:37'), + (183,'Steven','sgonzalez52@mozilla.org','169.112.247.47','2002-08-24 14:59:25'), + (184,'Kathleen','kfuller53@bloglovin.com','80.93.59.30','2002-03-11 13:41:29'), + (185,'Nicole','nhenderson54@usda.gov','39.253.60.30','1995-04-24 05:55:07'), + (186,'Ralph','rharper55@purevolume.com','167.147.142.189','1980-02-10 18:35:45'), + (187,'Heather','hcunningham56@photobucket.com','96.222.196.229','2007-06-15 05:37:50'), + (188,'Nancy','nlittle57@cbc.ca','241.53.255.175','2007-07-12 23:42:48'), + (189,'Juan','jramirez58@pinterest.com','190.128.84.27','1978-11-07 23:37:37'), + (190,'Beverly','bfowler59@chronoengine.com','54.144.230.49','1979-03-31 23:27:28'), + (191,'Shirley','sstevens5a@prlog.org','200.97.231.248','2011-12-06 07:08:50'), + (192,'Annie','areyes5b@squidoo.com','223.32.182.101','2011-05-28 02:42:09'), + (193,'Jack','jkelley5c@tiny.cc','47.34.118.150','1981-12-05 17:31:40'), + (194,'Keith','krobinson5d@1und1.de','170.210.209.31','1999-03-09 11:05:43'), + (195,'Joseph','jmiller5e@google.com.au','136.74.212.139','1984-10-08 13:18:20'), + (196,'Annie','aday5f@blogspot.com','71.99.186.69','1986-02-18 12:27:34'), + (197,'Nancy','nperez5g@liveinternet.ru','28.160.6.107','1983-10-20 17:51:20'), + (198,'Tammy','tward5h@ucoz.ru','141.43.164.70','1980-03-31 04:45:29'), + (199,'Doris','dryan5i@ted.com','239.117.202.188','1985-07-03 03:17:53'), + (200,'Rose','rmendoza5j@photobucket.com','150.200.206.79','1973-04-21 21:36:40'), + (201,'Cynthia','cbutler5k@hubpages.com','80.153.174.161','2001-01-20 01:42:26'), + (202,'Samuel','soliver5l@people.com.cn','86.127.246.140','1970-09-02 02:19:00'), + (203,'Carl','csanchez5m@mysql.com','50.149.237.107','1993-12-01 07:02:09'), + (204,'Kathryn','kowens5n@geocities.jp','145.166.205.201','2004-07-06 18:39:33'), + (205,'Nicholas','nnichols5o@parallels.com','190.240.66.170','2014-11-11 18:52:19'), + (206,'Keith','kwillis5p@youtube.com','181.43.206.100','1998-06-13 06:30:51'), + (207,'Justin','jwebb5q@intel.com','211.54.245.74','2000-11-04 16:58:26'), + (208,'Gary','ghicks5r@wikipedia.org','196.154.213.104','1992-12-01 19:48:28'), + (209,'Martin','mpowell5s@flickr.com','153.67.12.241','1983-06-30 06:24:32'), + (210,'Brenda','bkelley5t@xinhuanet.com','113.100.5.172','2005-01-08 20:50:22'), + (211,'Edward','eray5u@a8.net','205.187.246.65','2011-09-26 08:04:44'), + (212,'Steven','slawson5v@senate.gov','238.150.250.36','1978-11-22 02:48:09'), + (213,'Robert','rthompson5w@furl.net','70.7.89.236','2001-09-12 08:52:07'), + (214,'Jack','jporter5x@diigo.com','220.172.29.99','1976-07-26 14:29:21'), + (215,'Lisa','ljenkins5y@oakley.com','150.151.170.180','2010-03-20 19:21:16'), + (216,'Theresa','tbell5z@mayoclinic.com','247.25.53.173','2001-03-11 05:36:40'), + (217,'Jimmy','jstephens60@weather.com','145.101.93.235','1983-04-12 09:35:30'), + (218,'Louis','lhunt61@amazon.co.jp','78.137.6.253','1997-08-29 19:34:34'), + (219,'Lawrence','lgilbert62@ted.com','243.132.8.78','2015-04-08 22:06:56'), + (220,'David','dgardner63@4shared.com','204.40.46.136','1971-07-09 03:29:11'), + (221,'Charles','ckennedy64@gmpg.org','211.83.233.2','2011-02-26 11:55:04'), + (222,'Lillian','lbanks65@msu.edu','124.233.12.80','2010-05-16 20:29:02'), + (223,'Ernest','enguyen66@baidu.com','82.45.128.148','1996-07-04 10:07:04'), + (224,'Ryan','rrussell67@cloudflare.com','202.53.240.223','1983-08-05 12:36:29'), + (225,'Donald','ddavis68@ustream.tv','47.39.218.137','1989-05-27 02:30:56'), + (226,'Joe','jscott69@blogspot.com','140.23.131.75','1973-03-16 12:21:31'), + (227,'Anne','amarshall6a@google.ca','113.162.200.197','1988-12-09 03:38:29'), + (228,'Willie','wturner6b@constantcontact.com','85.83.182.249','1991-10-06 01:51:10'), + (229,'Nicole','nwilson6c@sogou.com','30.223.51.135','1977-05-29 19:54:56'), + (230,'Janet','jwheeler6d@stumbleupon.com','153.194.27.144','2011-03-13 12:48:47'), + (231,'Lois','lcarr6e@statcounter.com','0.41.36.53','1993-02-06 04:52:01'), + (232,'Shirley','scruz6f@tmall.com','37.156.39.223','2007-02-18 17:47:01'), + (233,'Patrick','pford6g@reverbnation.com','36.198.200.89','1977-03-06 15:47:24'), + (234,'Lisa','lhudson6h@usatoday.com','134.213.58.137','2014-10-28 01:56:56'), + (235,'Pamela','pmartinez6i@opensource.org','5.151.127.202','1987-11-30 16:44:47'), + (236,'Larry','lperez6j@infoseek.co.jp','235.122.96.148','1979-01-18 06:33:45'), + (237,'Pamela','pramirez6k@census.gov','138.233.34.163','2012-01-29 10:35:20'), + (238,'Daniel','dcarr6l@php.net','146.21.152.242','1984-11-17 08:22:59'), + (239,'Patrick','psmith6m@indiegogo.com','136.222.199.36','2001-05-30 22:16:44'), + (240,'Raymond','rhenderson6n@hc360.com','116.31.112.38','2000-01-05 20:35:41'), + (241,'Teresa','treynolds6o@miitbeian.gov.cn','198.126.205.220','1996-11-08 01:27:31'), + (242,'Johnny','jmason6p@flickr.com','192.8.232.114','2013-05-14 05:35:50'), + (243,'Angela','akelly6q@guardian.co.uk','234.116.60.197','1977-08-20 02:05:17'), + (244,'Douglas','dcole6r@cmu.edu','128.135.212.69','2016-10-26 17:40:36'), + (245,'Frances','fcampbell6s@twitpic.com','94.22.243.235','1987-04-26 07:07:13'), + (246,'Donna','dgreen6t@chron.com','227.116.46.107','2011-07-25 12:59:54'), + (247,'Benjamin','bfranklin6u@redcross.org','89.141.142.89','1974-05-03 20:28:18'), + (248,'Randy','rpalmer6v@rambler.ru','70.173.63.178','2011-12-20 17:40:18'), + (249,'Melissa','mmurray6w@bbb.org','114.234.118.137','1991-02-26 12:45:44'), + (250,'Jean','jlittle6x@epa.gov','141.21.163.254','1991-08-16 04:57:09'), + (251,'Daniel','dolson6y@nature.com','125.75.104.97','2010-04-23 06:25:54'), + (252,'Kathryn','kwells6z@eventbrite.com','225.104.28.249','2015-01-31 02:21:50'), + (253,'Theresa','tgonzalez70@ox.ac.uk','91.93.156.26','1971-12-11 10:31:31'), + (254,'Beverly','broberts71@bluehost.com','244.40.158.89','2013-09-21 13:02:31'), + (255,'Pamela','pmurray72@netscape.com','218.54.95.216','1985-04-16 00:34:00'), + (256,'Timothy','trichardson73@amazonaws.com','235.49.24.229','2000-11-11 09:48:28'), + (257,'Mildred','mpalmer74@is.gd','234.125.95.132','1992-05-25 02:25:02'), + (258,'Jessica','jcampbell75@google.it','55.98.30.140','2014-08-26 00:26:34'), + (259,'Beverly','bthomas76@cpanel.net','48.78.228.176','1970-08-18 10:40:05'), + (260,'Eugene','eward77@cargocollective.com','139.226.204.2','1996-12-04 23:17:00'), + (261,'Andrea','aallen78@webnode.com','160.31.214.38','2009-07-06 07:22:37'), + (262,'Justin','jruiz79@merriam-webster.com','150.149.246.122','2005-06-06 11:44:19'), + (263,'Kenneth','kedwards7a@networksolutions.com','98.82.193.128','2001-07-03 02:00:10'), + (264,'Rachel','rday7b@miibeian.gov.cn','114.15.247.221','1994-08-18 19:45:40'), + (265,'Russell','rmiller7c@instagram.com','184.130.152.253','1977-11-06 01:58:12'), + (266,'Bonnie','bhudson7d@cornell.edu','235.180.186.206','1990-12-03 22:45:24'), + (267,'Raymond','rknight7e@yandex.ru','161.2.44.252','1995-08-25 04:31:19'), + (268,'Bonnie','brussell7f@elpais.com','199.237.57.207','1991-03-29 08:32:06'), + (269,'Marie','mhenderson7g@elpais.com','52.203.131.144','2004-06-04 21:50:28'), + (270,'Alan','acarr7h@trellian.com','147.51.205.72','2005-03-03 10:51:31'), + (271,'Barbara','bturner7i@hugedomains.com','103.160.110.226','2004-08-04 13:42:40'), + (272,'Christina','cdaniels7j@census.gov','0.238.61.251','1972-10-18 12:47:33'), + (273,'Jeremy','jgomez7k@reuters.com','111.26.65.56','2013-01-13 10:41:35'), + (274,'Laura','lwood7l@icio.us','149.153.38.205','2011-06-25 09:33:59'), + (275,'Matthew','mbowman7m@auda.org.au','182.138.206.172','1999-03-05 03:25:36'), + (276,'Denise','dparker7n@icq.com','0.213.88.138','2011-11-04 09:43:06'), + (277,'Phillip','pparker7o@discuz.net','219.242.165.240','1973-10-19 04:22:29'), + (278,'Joan','jpierce7p@salon.com','63.31.213.202','1989-04-09 22:06:24'), + (279,'Irene','ibaker7q@cbc.ca','102.33.235.114','1992-09-04 13:00:57'), + (280,'Betty','bbowman7r@ted.com','170.91.249.242','2015-09-28 08:14:22'), + (281,'Teresa','truiz7s@boston.com','82.108.158.207','1999-07-18 05:17:09'), + (282,'Helen','hbrooks7t@slideshare.net','102.87.162.187','2003-01-06 15:45:29'), + (283,'Karen','kgriffin7u@wunderground.com','43.82.44.184','2010-05-28 01:56:37'), + (284,'Lisa','lfernandez7v@mtv.com','200.238.218.220','1993-04-03 20:33:51'), + (285,'Jesse','jlawrence7w@timesonline.co.uk','95.122.105.78','1990-01-05 17:28:43'), + (286,'Terry','tross7x@macromedia.com','29.112.114.133','2009-08-29 21:32:17'), + (287,'Angela','abradley7y@icq.com','177.44.27.72','1989-10-04 21:46:06'), + (288,'Maria','mhart7z@dailymotion.com','55.27.55.202','1975-01-21 01:22:57'), + (289,'Raymond','randrews80@pinterest.com','88.90.78.67','1992-03-16 21:37:40'), + (290,'Kathy','krice81@bluehost.com','212.63.196.102','2000-12-14 03:06:44'), + (291,'Cynthia','cramos82@nymag.com','107.89.190.6','2005-06-28 02:02:33'), + (292,'Kimberly','kjones83@mysql.com','86.169.101.101','2007-06-13 22:56:49'), + (293,'Timothy','thansen84@microsoft.com','108.100.254.90','2003-04-04 10:31:57'), + (294,'Carol','cspencer85@berkeley.edu','75.118.144.187','1999-03-30 14:53:21'), + (295,'Louis','lmedina86@latimes.com','141.147.163.24','1991-04-11 17:53:13'), + (296,'Margaret','mcole87@google.fr','53.184.26.83','1991-12-19 01:54:10'), + (297,'Mary','mgomez88@yellowpages.com','208.56.57.99','1976-05-21 18:05:08'), + (298,'Amanda','aanderson89@geocities.com','147.73.15.252','1987-08-22 15:05:28'), + (299,'Kathryn','kgarrett8a@nature.com','27.29.177.220','1976-07-15 04:25:04'), + (300,'Dorothy','dmason8b@shareasale.com','106.210.99.193','1990-09-03 21:39:31'), + (301,'Lois','lkennedy8c@amazon.de','194.169.29.187','2007-07-29 14:09:31'), + (302,'Irene','iburton8d@washingtonpost.com','196.143.110.249','2013-09-05 11:32:46'), + (303,'Betty','belliott8e@wired.com','183.105.222.199','1979-09-19 19:29:13'), + (304,'Bobby','bmeyer8f@census.gov','36.13.161.145','2014-05-24 14:34:39'), + (305,'Ann','amorrison8g@sfgate.com','72.154.54.137','1978-10-05 14:22:34'), + (306,'Daniel','djackson8h@wunderground.com','144.95.32.34','1990-07-27 13:23:05'), + (307,'Joe','jboyd8i@alibaba.com','187.105.86.178','2011-09-28 16:46:32'), + (308,'Ralph','rdunn8j@fc2.com','3.19.87.255','1984-10-18 08:00:40'), + (309,'Craig','ccarter8k@gizmodo.com','235.152.76.215','1998-07-04 12:15:21'), + (310,'Paula','pdean8l@hhs.gov','161.100.173.197','1973-02-13 09:38:55'), + (311,'Andrew','agarrett8m@behance.net','199.253.123.218','1991-02-14 13:36:32'), + (312,'Janet','jhowell8n@alexa.com','39.189.139.79','2012-11-24 20:17:33'), + (313,'Keith','khansen8o@godaddy.com','116.186.223.196','1987-08-23 21:22:05'), + (314,'Nicholas','nedwards8p@state.gov','142.175.142.11','1977-03-28 18:27:27'), + (315,'Jacqueline','jallen8q@oaic.gov.au','189.66.135.192','1994-10-26 11:44:26'), + (316,'Frank','fgardner8r@mapy.cz','154.77.119.169','1983-01-29 19:19:51'), + (317,'Eric','eharrison8s@google.cn','245.139.65.123','1984-02-04 09:54:36'), + (318,'Gregory','gcooper8t@go.com','171.147.0.221','2004-06-14 05:22:08'), + (319,'Jean','jfreeman8u@rakuten.co.jp','67.243.121.5','1977-01-07 18:23:43'), + (320,'Juan','jlewis8v@shinystat.com','216.181.171.189','2001-08-23 17:32:43'), + (321,'Randy','rwilliams8w@shinystat.com','105.152.146.28','1983-02-17 00:05:50'), + (322,'Stephen','shart8x@sciencedirect.com','196.131.205.148','2004-02-15 10:12:03'), + (323,'Annie','ahunter8y@example.com','63.36.34.103','2003-07-23 21:15:25'), + (324,'Melissa','mflores8z@cbc.ca','151.230.217.90','1983-11-02 14:53:56'), + (325,'Jane','jweaver90@about.me','0.167.235.217','1987-07-29 00:13:44'), + (326,'Anthony','asmith91@oracle.com','97.87.48.41','2001-05-31 18:44:11'), + (327,'Terry','tdavis92@buzzfeed.com','46.20.12.51','2015-09-12 23:13:55'), + (328,'Brandon','bmontgomery93@gravatar.com','252.101.48.186','2010-10-28 08:26:27'), + (329,'Chris','cmurray94@bluehost.com','25.158.167.97','2004-05-05 16:10:31'), + (330,'Denise','dfuller95@hugedomains.com','216.210.149.28','1979-04-20 08:57:24'), + (331,'Arthur','amcdonald96@sakura.ne.jp','206.42.36.213','2009-08-15 03:26:16'), + (332,'Jesse','jhoward97@google.cn','46.181.118.30','1974-04-18 14:08:41'), + (333,'Frank','fsimpson98@domainmarket.com','163.220.211.87','2006-06-30 14:46:52'), + (334,'Janice','jwoods99@pen.io','229.245.237.182','1988-04-06 11:52:58'), + (335,'Rebecca','rroberts9a@huffingtonpost.com','148.96.15.80','1976-10-05 08:44:16'), + (336,'Joshua','jray9b@opensource.org','192.253.12.198','1971-12-25 22:27:07'), + (337,'Joyce','jcarpenter9c@statcounter.com','125.171.46.215','2001-12-31 22:08:13'), + (338,'Andrea','awest9d@privacy.gov.au','79.101.180.201','1983-02-18 20:07:47'), + (339,'Christine','chudson9e@yelp.com','64.198.43.56','1997-09-08 08:03:43'), + (340,'Joe','jparker9f@earthlink.net','251.215.148.153','1973-11-04 05:08:18'), + (341,'Thomas','tkim9g@answers.com','49.187.34.47','1991-08-07 21:13:48'), + (342,'Janice','jdean9h@scientificamerican.com','4.197.117.16','2009-12-08 02:35:49'), + (343,'James','jmitchell9i@umich.edu','43.121.18.147','2011-04-28 17:04:09'), + (344,'Charles','cgardner9j@purevolume.com','197.78.240.240','1998-02-11 06:47:07'), + (345,'Robert','rhenderson9k@friendfeed.com','215.84.180.88','2002-05-10 15:33:14'), + (346,'Chris','cgray9l@4shared.com','249.70.192.240','1998-10-03 16:43:42'), + (347,'Gloria','ghayes9m@hibu.com','81.103.138.26','1999-12-26 11:23:13'), + (348,'Edward','eramirez9n@shareasale.com','38.136.90.136','2010-08-19 08:01:06'), + (349,'Cheryl','cbutler9o@google.ca','172.180.78.172','1995-05-27 20:03:52'), + (350,'Margaret','mwatkins9p@sfgate.com','3.20.198.6','2014-10-21 01:42:58'), + (351,'Rebecca','rwelch9q@examiner.com','45.81.42.208','2001-02-08 12:19:06'), + (352,'Joe','jpalmer9r@phpbb.com','163.202.92.190','1970-01-05 11:29:12'), + (353,'Sandra','slewis9s@dyndns.org','77.215.201.236','1974-01-05 07:04:04'), + (354,'Todd','tfranklin9t@g.co','167.125.181.82','2009-09-28 10:13:58'), + (355,'Joseph','jlewis9u@webmd.com','244.204.6.11','1990-10-21 15:49:57'), + (356,'Alan','aknight9v@nydailynews.com','152.197.95.83','1996-03-08 08:43:17'), + (357,'Sharon','sdean9w@123-reg.co.uk','237.46.40.26','1985-11-30 12:09:24'), + (358,'Annie','awright9x@cafepress.com','190.45.231.111','2000-08-24 11:56:06'), + (359,'Diane','dhamilton9y@youtube.com','85.146.171.196','2015-02-24 02:03:57'), + (360,'Antonio','alane9z@auda.org.au','61.63.146.203','2001-05-13 03:43:34'), + (361,'Matthew','mallena0@hhs.gov','29.97.32.19','1973-02-19 23:43:32'), + (362,'Bonnie','bfowlera1@soup.io','251.216.99.53','2013-08-01 15:35:41'), + (363,'Margaret','mgraya2@examiner.com','69.255.151.79','1998-01-23 22:24:59'), + (364,'Joan','jwagnera3@printfriendly.com','192.166.120.61','1973-07-13 00:30:22'), + (365,'Catherine','cperkinsa4@nytimes.com','58.21.24.214','2006-11-19 11:52:26'), + (366,'Mark','mcartera5@cpanel.net','220.33.102.142','2007-09-09 09:43:27'), + (367,'Paula','ppricea6@msn.com','36.182.238.124','2009-11-11 09:13:05'), + (368,'Catherine','cgreena7@army.mil','228.203.58.19','2005-08-09 16:52:15'), + (369,'Helen','hhamiltona8@symantec.com','155.56.194.99','2005-02-01 05:40:36'), + (370,'Jane','jmeyera9@ezinearticles.com','133.244.113.213','2013-11-06 22:10:23'), + (371,'Wanda','wevansaa@bloglovin.com','233.125.192.48','1994-12-26 23:43:42'), + (372,'Mark','mmarshallab@tumblr.com','114.74.60.47','2016-09-29 18:03:01'), + (373,'Andrew','amartinezac@google.cn','182.54.37.130','1976-06-06 17:04:17'), + (374,'Helen','hmoralesad@e-recht24.de','42.45.4.123','1977-03-28 19:06:59'), + (375,'Bonnie','bstoneae@php.net','196.149.79.137','1970-02-05 17:05:58'), + (376,'Douglas','dfreemanaf@nasa.gov','215.65.124.218','2008-11-20 21:51:55'), + (377,'Willie','wwestag@army.mil','35.189.92.118','1992-07-24 05:08:08'), + (378,'Cheryl','cwagnerah@upenn.edu','228.239.222.141','2010-01-25 06:29:01'), + (379,'Sandra','swardai@baidu.com','63.11.113.240','1985-05-23 08:07:37'), + (380,'Julie','jrobinsonaj@jugem.jp','110.58.202.50','2015-03-05 09:42:07'), + (381,'Larry','lwagnerak@shop-pro.jp','98.234.25.24','1975-07-22 22:22:02'), + (382,'Juan','jcastilloal@yelp.com','24.174.74.202','2007-01-17 09:32:43'), + (383,'Donna','dfrazieram@artisteer.com','205.26.147.45','1990-02-11 20:55:46'), + (384,'Rachel','rfloresan@w3.org','109.60.216.162','1983-05-22 22:42:18'), + (385,'Robert','rreynoldsao@theguardian.com','122.65.209.130','2009-05-01 18:02:51'), + (386,'Donald','dbradleyap@etsy.com','42.54.35.126','1997-01-16 16:31:52'), + (387,'Rachel','rfisheraq@nih.gov','160.243.250.45','2006-02-17 22:05:49'), + (388,'Nicholas','nhamiltonar@princeton.edu','156.211.37.111','1976-06-21 03:36:29'), + (389,'Timothy','twhiteas@ca.gov','36.128.23.70','1975-09-24 03:51:18'), + (390,'Diana','dbradleyat@odnoklassniki.ru','44.102.120.184','1983-04-27 09:02:50'), + (391,'Billy','bfowlerau@jimdo.com','91.200.68.196','1995-01-29 06:57:35'), + (392,'Bruce','bandrewsav@ucoz.com','48.12.101.125','1992-10-27 04:31:39'), + (393,'Linda','lromeroaw@usa.gov','100.71.233.19','1992-06-08 15:13:18'), + (394,'Debra','dwatkinsax@ucoz.ru','52.160.233.193','2001-11-11 06:51:01'), + (395,'Katherine','kburkeay@wix.com','151.156.242.141','2010-06-14 19:54:28'), + (396,'Martha','mharrisonaz@youku.com','21.222.10.199','1989-10-16 14:17:55'), + (397,'Dennis','dwellsb0@youtu.be','103.16.29.3','1985-12-21 06:05:51'), + (398,'Gloria','grichardsb1@bloglines.com','90.147.120.234','1982-08-27 01:04:43'), + (399,'Brenda','bfullerb2@t.co','33.253.63.90','2011-04-20 05:00:35'), + (400,'Larry','lhendersonb3@disqus.com','88.95.132.128','1982-08-31 02:15:12'), + (401,'Richard','rlarsonb4@wisc.edu','13.48.231.150','1979-04-15 14:08:09'), + (402,'Terry','thuntb5@usa.gov','65.91.103.240','1998-05-15 11:50:49'), + (403,'Harry','hburnsb6@nasa.gov','33.38.21.244','1981-04-12 14:02:20'), + (404,'Diana','dellisb7@mlb.com','218.229.81.135','1997-01-29 00:17:25'), + (405,'Jack','jburkeb8@tripadvisor.com','210.227.182.216','1984-03-09 17:24:03'), + (406,'Julia','jlongb9@fotki.com','10.210.12.104','2005-10-26 03:54:13'), + (407,'Lois','lscottba@msu.edu','188.79.136.138','1973-02-02 18:40:39'), + (408,'Sandra','shendersonbb@shareasale.com','114.171.220.108','2012-06-09 18:22:26'), + (409,'Irene','isanchezbc@cdbaby.com','109.255.50.119','1983-09-28 21:11:27'), + (410,'Emily','ebrooksbd@bandcamp.com','227.81.93.79','1970-08-31 21:08:01'), + (411,'Michelle','mdiazbe@businessweek.com','236.249.6.226','1993-05-22 08:07:07'), + (412,'Tammy','tbennettbf@wisc.edu','145.253.239.152','1978-12-31 20:24:51'), + (413,'Christine','cgreenebg@flickr.com','97.25.140.118','1978-07-17 12:55:30'), + (414,'Patricia','pgarzabh@tuttocitta.it','139.246.192.211','1984-02-27 13:40:08'), + (415,'Kimberly','kromerobi@aol.com','73.56.88.247','1976-09-16 14:22:04'), + (416,'George','gjohnstonbj@fda.gov','240.36.245.185','1979-07-24 14:36:02'), + (417,'Eugene','efullerbk@sciencedaily.com','42.38.105.140','2012-09-12 01:56:41'), + (418,'Andrea','astevensbl@goo.gl','31.152.207.204','1979-05-24 11:06:21'), + (419,'Shirley','sreidbm@scientificamerican.com','103.60.31.241','1984-02-23 04:07:41'), + (420,'Terry','tmorenobn@blinklist.com','92.161.34.42','1994-06-25 14:01:35'), + (421,'Christopher','cmorenobo@go.com','158.86.176.82','1973-09-05 09:18:47'), + (422,'Dennis','dhansonbp@ning.com','40.160.81.75','1982-01-20 10:19:41'), + (423,'Beverly','brussellbq@de.vu','138.32.56.204','1997-11-06 07:20:19'), + (424,'Howard','hparkerbr@163.com','103.171.134.171','2015-06-24 15:37:10'), + (425,'Helen','hmccoybs@fema.gov','61.200.4.71','1995-06-20 08:59:10'), + (426,'Ann','ahudsonbt@cafepress.com','239.187.71.125','1977-04-11 07:59:28'), + (427,'Tina','twestbu@nhs.uk','80.213.117.74','1992-08-19 05:54:44'), + (428,'Terry','tnguyenbv@noaa.gov','21.93.118.95','1991-09-19 23:22:55'), + (429,'Ashley','aburtonbw@wix.com','233.176.205.109','2009-11-10 05:01:20'), + (430,'Eric','emyersbx@1und1.de','168.91.212.67','1987-08-10 07:16:20'), + (431,'Barbara','blittleby@lycos.com','242.14.189.239','2008-08-02 12:13:04'), + (432,'Sean','sevansbz@instagram.com','14.39.177.13','2007-04-16 17:28:49'), + (433,'Shirley','sburtonc0@newsvine.com','34.107.138.76','1980-12-10 02:19:29'), + (434,'Patricia','pfreemanc1@so-net.ne.jp','219.213.142.117','1987-03-01 02:25:45'), + (435,'Paula','pfosterc2@vkontakte.ru','227.14.138.141','1972-09-22 12:59:34'), + (436,'Nicole','nstewartc3@1688.com','8.164.23.115','1998-10-27 00:10:17'), + (437,'Earl','ekimc4@ovh.net','100.26.244.177','2013-01-22 10:05:46'), + (438,'Beverly','breedc5@reuters.com','174.12.226.27','1974-09-22 07:29:36'), + (439,'Lawrence','lbutlerc6@a8.net','105.164.42.164','1992-06-05 00:43:40'), + (440,'Charles','cmoorec7@ucoz.com','252.197.131.69','1990-04-09 02:34:05'), + (441,'Alice','alawsonc8@live.com','183.73.220.232','1989-02-28 09:11:04'), + (442,'Dorothy','dcarpenterc9@arstechnica.com','241.47.200.14','2005-05-02 19:57:21'), + (443,'Carolyn','cfowlerca@go.com','213.109.55.202','1978-09-10 20:18:20'), + (444,'Anthony','alongcb@free.fr','169.221.158.204','1984-09-13 01:59:23'), + (445,'Annie','amoorecc@e-recht24.de','50.34.148.61','2009-03-26 03:41:07'), + (446,'Carlos','candrewscd@ihg.com','236.69.59.212','1972-03-29 22:42:48'), + (447,'Beverly','bramosce@google.ca','164.250.184.49','1982-11-10 04:34:01'), + (448,'Teresa','tlongcf@umich.edu','174.88.53.223','1987-05-17 12:48:00'), + (449,'Roy','rboydcg@uol.com.br','91.58.243.215','1974-06-16 17:59:54'), + (450,'Ashley','afieldsch@tamu.edu','130.138.11.126','1983-09-15 05:52:36'), + (451,'Judith','jhawkinsci@cmu.edu','200.187.103.245','2003-10-22 12:24:03'), + (452,'Rebecca','rwestcj@ocn.ne.jp','72.85.3.103','1980-11-13 11:01:26'), + (453,'Raymond','rporterck@infoseek.co.jp','146.33.216.151','1982-05-17 23:58:03'), + (454,'Janet','jmarshallcl@odnoklassniki.ru','52.46.193.166','1998-10-04 00:02:21'), + (455,'Shirley','speterscm@salon.com','248.126.31.15','1987-01-30 06:04:59'), + (456,'Annie','abowmancn@economist.com','222.213.248.59','2006-03-14 23:52:59'), + (457,'Jean','jlarsonco@blogspot.com','71.41.25.195','2007-09-08 23:49:45'), + (458,'Phillip','pmoralescp@stanford.edu','74.119.87.28','2011-03-14 20:25:40'), + (459,'Norma','nrobinsoncq@economist.com','28.225.21.54','1989-10-21 01:22:43'), + (460,'Kimberly','kclarkcr@dion.ne.jp','149.171.132.153','2008-06-27 02:27:30'), + (461,'Ruby','rmorriscs@ucla.edu','177.85.163.249','2016-01-28 16:43:44'), + (462,'Jonathan','jcastilloct@tripod.com','78.4.28.77','2000-05-24 17:33:06'), + (463,'Edward','ebryantcu@jigsy.com','140.31.98.193','1992-12-17 08:32:47'), + (464,'Chris','chamiltoncv@eepurl.com','195.171.234.206','1970-12-05 03:42:19'), + (465,'Michael','mweavercw@reference.com','7.233.133.213','1987-03-29 02:30:54'), + (466,'Howard','hlawrencecx@businessweek.com','113.225.124.224','1990-07-30 07:20:57'), + (467,'Philip','phowardcy@comsenz.com','159.170.247.249','2010-10-15 10:18:37'), + (468,'Mary','mmarshallcz@xing.com','125.132.189.70','2007-07-19 13:48:47'), + (469,'Scott','salvarezd0@theguardian.com','78.49.103.230','1987-10-31 06:10:44'), + (470,'Wayne','wcarrolld1@blog.com','238.1.120.204','1980-11-19 03:26:10'), + (471,'Jennifer','jwoodsd2@multiply.com','92.20.224.49','2010-05-06 22:17:04'), + (472,'Raymond','rwelchd3@toplist.cz','176.158.35.240','2007-12-12 19:02:51'), + (473,'Steven','sdixond4@wisc.edu','167.55.237.52','1984-05-05 11:44:37'), + (474,'Ralph','rjamesd5@ameblo.jp','241.190.50.133','2000-07-06 08:44:37'), + (475,'Jason','jrobinsond6@hexun.com','138.119.139.56','2006-02-03 05:27:45'), + (476,'Doris','dwoodd7@fema.gov','180.220.156.190','1978-05-11 20:14:20'), + (477,'Elizabeth','eberryd8@youtu.be','74.188.53.229','2006-11-18 08:29:06'), + (478,'Irene','igilbertd9@privacy.gov.au','194.152.218.1','1985-09-17 02:46:52'), + (479,'Jessica','jdeanda@ameblo.jp','178.103.93.118','1974-06-07 19:04:05'), + (480,'Rachel','ralvarezdb@phoca.cz','17.22.223.174','1999-03-08 02:43:25'), + (481,'Kenneth','kthompsondc@shinystat.com','229.119.91.234','2007-05-15 13:17:32'), + (482,'Harold','hmurraydd@parallels.com','133.26.188.80','1993-11-15 03:42:07'), + (483,'Paula','phowellde@samsung.com','34.215.28.216','1993-11-29 15:55:00'), + (484,'Ruth','rpiercedf@tripadvisor.com','111.30.130.123','1986-08-17 10:19:38'), + (485,'Phyllis','paustindg@vk.com','50.84.34.178','1994-04-13 03:05:24'), + (486,'Laura','lfosterdh@usnews.com','37.8.101.33','2001-06-30 08:58:59'), + (487,'Eric','etaylordi@com.com','103.183.253.45','2006-09-15 20:18:46'), + (488,'Doris','driveradj@prweb.com','247.16.2.199','1989-05-08 09:27:09'), + (489,'Ryan','rhughesdk@elegantthemes.com','103.234.153.232','1989-08-01 18:36:06'), + (490,'Steve','smoralesdl@jigsy.com','3.76.84.207','2011-03-13 17:01:05'), + (491,'Louis','lsullivandm@who.int','78.135.44.208','1975-11-26 16:01:23'), + (492,'Catherine','ctuckerdn@seattletimes.com','93.137.106.21','1990-03-13 16:14:56'), + (493,'Ann','adixondo@gmpg.org','191.136.222.111','2002-06-05 14:22:18'), + (494,'Johnny','jhartdp@amazon.com','103.252.198.39','1988-07-30 23:54:49'), + (495,'Susan','srichardsdq@skype.com','126.247.192.11','2005-01-09 12:08:14'), + (496,'Brenda','bparkerdr@skype.com','63.232.216.86','1974-05-18 05:58:29'), + (497,'Tammy','tmurphyds@constantcontact.com','56.56.37.112','2014-08-05 18:22:25'), + (498,'Larry','lhayesdt@wordpress.com','162.146.13.46','1997-02-26 14:01:53'), + (499,'Evelyn','ethomasdu@hhs.gov','6.241.88.250','2007-09-14 13:03:34'), + (500,'Paula','pshawdv@networksolutions.com','123.27.47.249','2003-10-30 21:19:20'); + +create table {schema}.seed_config_expected_1 as ( + + select *, 'default'::text as c1, 'default'::text as c2, 'was true'::text as some_bool from {schema}.seed + +); + +create table {schema}.seed_config_expected_2 as ( + + select *, 'abc'::text as c1, 'def'::text as c2, 'was true'::text as some_bool from {schema}.seed + +); + +create table {schema}.seed_config_expected_3 as ( + + select *, 'ghi'::text as c1, 'jkl'::text as c2, 'was true'::text as some_bool from {schema}.seed + +); + +create table {schema}.seed_summary ( + year timestamp without time zone, + count bigint +); + +INSERT INTO {schema}.seed_summary + ("year","count") +VALUES + ('1970-01-01 00:00:00',10), + ('1971-01-01 00:00:00',6), + ('1972-01-01 00:00:00',9), + ('1973-01-01 00:00:00',12), + ('1974-01-01 00:00:00',8), + ('1975-01-01 00:00:00',5), + ('1976-01-01 00:00:00',11), + ('1977-01-01 00:00:00',13), + ('1978-01-01 00:00:00',11), + ('1979-01-01 00:00:00',13), + ('1980-01-01 00:00:00',9), + ('1981-01-01 00:00:00',3), + ('1982-01-01 00:00:00',9), + ('1983-01-01 00:00:00',15), + ('1984-01-01 00:00:00',13), + ('1985-01-01 00:00:00',11), + ('1986-01-01 00:00:00',5), + ('1987-01-01 00:00:00',14), + ('1988-01-01 00:00:00',9), + ('1989-01-01 00:00:00',10), + ('1990-01-01 00:00:00',12), + ('1991-01-01 00:00:00',16), + ('1992-01-01 00:00:00',15), + ('1993-01-01 00:00:00',11), + ('1994-01-01 00:00:00',10), + ('1995-01-01 00:00:00',10), + ('1996-01-01 00:00:00',6), + ('1997-01-01 00:00:00',11), + ('1998-01-01 00:00:00',12), + ('1999-01-01 00:00:00',9), + ('2000-01-01 00:00:00',13), + ('2001-01-01 00:00:00',14), + ('2002-01-01 00:00:00',9), + ('2003-01-01 00:00:00',8), + ('2004-01-01 00:00:00',9), + ('2005-01-01 00:00:00',14), + ('2006-01-01 00:00:00',9), + ('2007-01-01 00:00:00',16), + ('2008-01-01 00:00:00',6), + ('2009-01-01 00:00:00',15), + ('2010-01-01 00:00:00',13), + ('2011-01-01 00:00:00',23), + ('2012-01-01 00:00:00',9), + ('2013-01-01 00:00:00',10), + ('2014-01-01 00:00:00',9), + ('2015-01-01 00:00:00',10), + ('2016-01-01 00:00:00',5); diff --git a/tests/functional/dependencies/data/update.sql b/tests/functional/dependencies/data/update.sql new file mode 100644 index 000000000..a3845ee41 --- /dev/null +++ b/tests/functional/dependencies/data/update.sql @@ -0,0 +1,7 @@ + +UPDATE {schema}.seed set first_name = 'Paul', updated_at = now() where id = 500; + +INSERT INTO {schema}.seed + ("id","first_name","email","ip_address","updated_at") +VALUES + (501, 'Steve', 'sthomas@hhs.gov', '6.241.88.251', now()); diff --git a/tests/functional/dependencies/duplicate_dependency/dbt_project.yml b/tests/functional/dependencies/duplicate_dependency/dbt_project.yml new file mode 100644 index 000000000..dbda758fc --- /dev/null +++ b/tests/functional/dependencies/duplicate_dependency/dbt_project.yml @@ -0,0 +1,5 @@ +name: 'test' +version: '1.0' +config-version: 2 + +profile: 'default' diff --git a/tests/functional/dependencies/early_hook_dependency/dbt_project.yml b/tests/functional/dependencies/early_hook_dependency/dbt_project.yml new file mode 100644 index 000000000..7cde1ad41 --- /dev/null +++ b/tests/functional/dependencies/early_hook_dependency/dbt_project.yml @@ -0,0 +1,6 @@ +name: early_hooks +version: '1.0' +config-version: 2 +on-run-start: + - create table {{ var('test_create_table') }} as (select 1 as id) + - create table {{ var('test_create_second_table') }} as (select 3 as id) diff --git a/tests/functional/dependencies/late_hook_dependency/dbt_project.yml b/tests/functional/dependencies/late_hook_dependency/dbt_project.yml new file mode 100644 index 000000000..62750f8d3 --- /dev/null +++ b/tests/functional/dependencies/late_hook_dependency/dbt_project.yml @@ -0,0 +1,6 @@ +name: late_hooks +version: '1.0' +config-version: 2 +on-run-start: + - insert into {{ var('test_create_table') }} values (2) + - insert into {{ var('test_create_second_table') }} values (4) diff --git a/tests/functional/dependencies/local_dependency/dbt_project.yml b/tests/functional/dependencies/local_dependency/dbt_project.yml new file mode 100644 index 000000000..d56280a55 --- /dev/null +++ b/tests/functional/dependencies/local_dependency/dbt_project.yml @@ -0,0 +1,23 @@ + +name: 'local_dep' +version: '1.0' +config-version: 2 + +profile: 'default' + +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] + +require-dbt-version: '>=0.1.0' + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + + +seeds: + quote_columns: False diff --git a/tests/functional/dependencies/local_dependency/macros/dep_macro.sql b/tests/functional/dependencies/local_dependency/macros/dep_macro.sql new file mode 100644 index 000000000..81e9a0fae --- /dev/null +++ b/tests/functional/dependencies/local_dependency/macros/dep_macro.sql @@ -0,0 +1,3 @@ +{% macro some_overridden_macro() -%} +100 +{%- endmacro %} diff --git a/tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql b/tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql new file mode 100644 index 000000000..1e8d62a6b --- /dev/null +++ b/tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql @@ -0,0 +1,15 @@ +{# This should not be ignored, even as it's in a subpackage #} +{% macro generate_schema_name(custom_schema_name=none, node=none) -%} + {{ var('schema_override', target.schema) }} +{%- endmacro %} + +{# This should not be ignored, even as it's in a subpackage #} +{% macro generate_database_name(custom_database_name=none, node=none) -%} + {{ 'dbt' }} +{%- endmacro %} + + +{# This should not be ignored, even as it's in a subpackage #} +{% macro generate_alias_name(custom_alias_name=none, node=none) -%} + {{ node.name ~ '_subpackage_generate_alias_name' }} +{%- endmacro %} diff --git a/tests/functional/dependencies/local_dependency/models/model_to_import.sql b/tests/functional/dependencies/local_dependency/models/model_to_import.sql new file mode 100644 index 000000000..4b91aa0f2 --- /dev/null +++ b/tests/functional/dependencies/local_dependency/models/model_to_import.sql @@ -0,0 +1 @@ +select * from {{ ref('seed') }} diff --git a/tests/functional/dependencies/local_dependency/models/schema.yml b/tests/functional/dependencies/local_dependency/models/schema.yml new file mode 100644 index 000000000..4b3278eda --- /dev/null +++ b/tests/functional/dependencies/local_dependency/models/schema.yml @@ -0,0 +1,11 @@ +version: 2 +sources: + - name: my_source + schema: invalid_schema + tables: + - name: my_table + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + identifier: "seed_subpackage_generate_alias_name" diff --git a/tests/functional/dependencies/local_dependency/seeds/seed.csv b/tests/functional/dependencies/local_dependency/seeds/seed.csv new file mode 100644 index 000000000..3ff3deb87 --- /dev/null +++ b/tests/functional/dependencies/local_dependency/seeds/seed.csv @@ -0,0 +1,2 @@ +id +1 diff --git a/tests/functional/dependencies/models_local/dep_source_model.sql b/tests/functional/dependencies/models_local/dep_source_model.sql new file mode 100644 index 000000000..e7e5fcfd5 --- /dev/null +++ b/tests/functional/dependencies/models_local/dep_source_model.sql @@ -0,0 +1,2 @@ +{# If our dependency source didn't exist, this would be an errror #} +select * from {{ source('seed_source', 'seed') }} diff --git a/tests/functional/dependencies/models_local/my_configured_model.sql b/tests/functional/dependencies/models_local/my_configured_model.sql new file mode 100644 index 000000000..554ed3b44 --- /dev/null +++ b/tests/functional/dependencies/models_local/my_configured_model.sql @@ -0,0 +1,4 @@ +{{ + config(schema='configured') +}} +select * from {{ ref('model_to_import') }} diff --git a/tests/functional/dependencies/models_local/my_model.sql b/tests/functional/dependencies/models_local/my_model.sql new file mode 100644 index 000000000..a84f75e1e --- /dev/null +++ b/tests/functional/dependencies/models_local/my_model.sql @@ -0,0 +1,2 @@ + +select * from {{ ref('model_to_import') }} diff --git a/tests/functional/dependencies/models_local/schema.yml b/tests/functional/dependencies/models_local/schema.yml new file mode 100644 index 000000000..af65187f4 --- /dev/null +++ b/tests/functional/dependencies/models_local/schema.yml @@ -0,0 +1,7 @@ +version: 2 +sources: + - name: my_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: my_table + identifier: seed diff --git a/tests/functional/dependencies/models_local/source_override_model.sql b/tests/functional/dependencies/models_local/source_override_model.sql new file mode 100644 index 000000000..d567d2083 --- /dev/null +++ b/tests/functional/dependencies/models_local/source_override_model.sql @@ -0,0 +1,2 @@ +{# If our source override didn't take, this would be an errror #} +select * from {{ source('my_source', 'my_table') }} diff --git a/tests/functional/dependencies/test_dependency_options.py b/tests/functional/dependencies/test_dependency_options.py new file mode 100644 index 000000000..08ffc5d2f --- /dev/null +++ b/tests/functional/dependencies/test_dependency_options.py @@ -0,0 +1,106 @@ +import os +import shutil + +from dbt.tests.util import run_dbt +import pytest + + +class TestDepsOptions(object): + # this revision of dbt-integration-project requires dbt-utils.git@0.5.0, which the + # package config handling should detect + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "package": "fivetran/fivetran_utils", + "version": "0.4.7", + }, + ] + } + + @pytest.fixture + def clean_start(self, project): + if os.path.exists("dbt_packages"): + shutil.rmtree("dbt_packages") + if os.path.exists("package-lock.yml"): + os.remove("package-lock.yml") + + def test_deps_lock(self, clean_start): + run_dbt(["deps", "--lock"]) + assert not os.path.exists("dbt_packages") + assert os.path.exists("package-lock.yml") + with open("package-lock.yml") as fp: + contents = fp.read() + assert ( + contents + == """packages: +- package: fivetran/fivetran_utils + version: 0.4.7 +- package: dbt-labs/dbt_utils + version: 1.1.1 +sha1_hash: 71304bca2138cf8004070b3573a1e17183c0c1a8 +""" + ) + + def test_deps_default(self, clean_start): + run_dbt(["deps"]) + assert len(os.listdir("dbt_packages")) == 2 + assert os.path.exists("package-lock.yml") + with open("package-lock.yml") as fp: + contents = fp.read() + assert ( + contents + == """packages: +- package: fivetran/fivetran_utils + version: 0.4.7 +- package: dbt-labs/dbt_utils + version: 1.1.1 +sha1_hash: 71304bca2138cf8004070b3573a1e17183c0c1a8 +""" + ) + + def test_deps_add(self, clean_start): + run_dbt(["deps", "--add-package", "dbt-labs/audit_helper@0.9.0"]) + with open("packages.yml") as fp: + contents = fp.read() + assert ( + contents + == """packages: + - package: fivetran/fivetran_utils + version: 0.4.7 + - package: dbt-labs/audit_helper + version: 0.9.0 +""" + ) + assert len(os.listdir("dbt_packages")) == 3 + + def test_deps_add_without_install(self, clean_start): + os.rename("packages.yml", "dependencies.yml") + run_dbt( + [ + "deps", + "--add-package", + "dbt-labs/audit_helper@0.9.0", + "--lock", + ] + ) + assert not os.path.exists("dbt_packages") + assert not os.path.exists("packages.yml") + with open("dependencies.yml") as fp: + contents = fp.read() + assert ( + contents + == """packages: + - package: fivetran/fivetran_utils + version: 0.4.7 + - package: dbt-labs/audit_helper + version: 0.9.0 +""" + ) + + def test_deps_upgrade(self, clean_start, mocker): + run_dbt(["deps", "--lock"]) + patched_lock = mocker.patch("dbt.task.deps.DepsTask.lock") + run_dbt(["deps", "--upgrade"]) + assert patched_lock.call_count == 1 diff --git a/tests/functional/dependencies/test_local_dependency.py b/tests/functional/dependencies/test_local_dependency.py new file mode 100644 index 000000000..6dcd7b4fa --- /dev/null +++ b/tests/functional/dependencies/test_local_dependency.py @@ -0,0 +1,352 @@ +import json +import os +from pathlib import Path +import shutil +from unittest import mock + +from dbt.exceptions import DbtProjectError, DependencyError +from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture +from dbt_common.exceptions import CompilationError, DbtRuntimeError +import dbt_common.semver as semver +import pytest +import yaml + +from tests.functional.utils import up_one + + +models__dep_source = """ +{# If our dependency source didn't exist, this would be an errror #} +select * from {{ source('seed_source', 'seed') }} +""" + +models__my_configured_model = """ +{{ + config(schema='configured') +}} +select * from {{ ref('model_to_import') }} +""" + +models__my_model = """ +select * from {{ ref('model_to_import') }} +""" + +models__source_override_model = """ +{# If our source override didn't take, this would be an errror #} +select * from {{ source('my_source', 'my_table') }} +""" + +models__iterate = """ +{% for x in no_such_dependency.no_such_method() %} +{% endfor %} +""" + +models__hooks_actual = """ +select * from {{ var('test_create_table') }} +union all +select * from {{ var('test_create_second_table') }} +""" + +models__hooks_expected = """ +{# surely there is a better way to do this! #} + +{% for _ in range(1, 5) %} +select {{ loop.index }} as id +{% if not loop.last %}union all{% endif %} +{% endfor %} +""" + +properties__schema_yml = """ +version: 2 +sources: + - name: my_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: my_table + identifier: seed_subpackage_generate_alias_name +""" + +macros__macro_sql = """ +{# This macro also exists in the dependency -dbt should be fine with that #} +{% macro some_overridden_macro() -%} +999 +{%- endmacro %} +""" + +macros__macro_override_schema_sql = """ +{% macro generate_schema_name(schema_name, node) -%} + + {{ schema_name }}_{{ node.schema }}_macro + +{%- endmacro %} +""" + + +class BaseDependencyTest(object): + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": macros__macro_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "dep_source_model.sql": models__dep_source, + "my_configured_model.sql": models__my_configured_model, + "my_model.sql": models__my_model, + "source_override_model.sql": models__source_override_model, + } + + @pytest.fixture(scope="class") + def properties(self): + return { + "schema.yml": properties__schema_yml, + } + + @pytest.fixture(scope="class", autouse=True) + def modify_schema_fqn(self, project): + schema_fqn = "{}.{}".format( + project.database, + project.test_schema, + ) + schema_fqn_configured = "{}.{}".format( + project.database, + project.test_schema + "_configured", + ) + + project.created_schemas.append(schema_fqn) + project.created_schemas.append(schema_fqn_configured) + + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project, modify_schema_fqn): + shutil.copytree( + project.test_dir / Path("local_dependency"), + project.project_root / Path("local_dependency"), + ) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + +class TestSimpleDependency(BaseDependencyTest): + def test_local_dependency(self, project): + run_dbt(["deps"]) + run_dbt(["seed"]) + results = run_dbt() + assert len(results) == 5 + + assert {r.node.schema for r in results} == { + project.test_schema, + project.test_schema + "_configured", + } + + base_schema_nodes = [r.node for r in results if r.node.schema == project.test_schema] + assert len(base_schema_nodes) == 4 + + check_relations_equal( + project.adapter, + [ + f"{project.test_schema}.source_override_model", + f"{project.test_schema}.seed_subpackage_generate_alias_name", + ], + ) + check_relations_equal( + project.adapter, + [ + f"{project.test_schema}.dep_source_model", + f"{project.test_schema}.seed_subpackage_generate_alias_name", + ], + ) + + def test_no_dependency_paths(self, project): + run_dbt(["deps"]) + run_dbt(["seed"]) + + # prove dependency does not exist as model in project + dep_path = os.path.join("models_local", "model_to_import.sql") + results = run_dbt( + ["run", "--models", f"+{dep_path}"], + ) + assert len(results) == 0 + + # prove model can run when importing that dependency + local_path = Path("models") / "my_model.sql" + results = run_dbt( + ["run", "--models", f"+{local_path}"], + ) + assert len(results) == 2 + + +class TestSimpleDependencyRelativePath(BaseDependencyTest): + def test_local_dependency_relative_path(self, project): + last_dir = Path(project.project_root).name + with up_one(): + _, stdout = run_dbt_and_capture(["deps", "--project-dir", last_dir]) + assert ( + "Installed from <local @ local_dependency>" in stdout + ), "Test output didn't contain expected string" + + +class TestMissingDependency(object): + @pytest.fixture(scope="class") + def models(self): + return { + "iterate.sql": models__iterate, + } + + def test_missing_dependency(self, project): + # dbt should raise a runtime exception + with pytest.raises(DbtRuntimeError): + run_dbt(["compile"]) + + +class TestSimpleDependencyWithSchema(BaseDependencyTest): + def dbt_vargs(self, schema): + # we can't add this to the config because Sources don't respect dbt_project.yml + vars_arg = yaml.safe_dump({"schema_override": "dbt_test_{}_macro".format(schema)}) + return ["--vars", vars_arg] + + def project_config(self): + return { + "models": { + "schema": "dbt_test", + }, + "seeds": { + "schema": "dbt_test", + }, + } + + @mock.patch("dbt.config.project.get_installed_version") + def test_local_dependency_out_of_date(self, mock_get, project): + mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") + run_dbt(["deps"] + self.dbt_vargs(project.test_schema)) + # check seed + with pytest.raises(DbtProjectError) as exc: + run_dbt(["seed"] + self.dbt_vargs(project.test_schema)) + assert "--no-version-check" in str(exc.value) + # check run too + with pytest.raises(DbtProjectError) as exc: + run_dbt(["run"] + self.dbt_vargs(project.test_schema)) + assert "--no-version-check" in str(exc.value) + + @mock.patch("dbt.config.project.get_installed_version") + def test_local_dependency_out_of_date_no_check(self, mock_get): + mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") + run_dbt(["deps"]) + run_dbt(["seed", "--no-version-check"]) + results = run_dbt(["run", "--no-version-check"]) + assert len(results) == 5 + + +class TestSimpleDependencyNoVersionCheckConfig(BaseDependencyTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "send_anonymous_usage_stats": False, + "version_check": False, + }, + "models": { + "schema": "dbt_test", + }, + "seeds": { + "schema": "dbt_test", + }, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": macros__macro_override_schema_sql} + + @mock.patch("dbt.config.project.get_installed_version") + def test_local_dependency_out_of_date_no_check(self, mock_get, project): + # we can't add this to the config because Sources don't respect dbt_project.yml + base_schema = "dbt_test_{}_macro".format(project.test_schema) + vars_arg = yaml.safe_dump( + { + "schema_override": base_schema, + } + ) + + mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") + run_dbt(["deps", "--vars", vars_arg]) + run_dbt(["seed", "--vars", vars_arg]) + results = run_dbt(["run", "--vars", vars_arg]) + len(results) == 5 + + +class TestSimpleDependencyHooks(BaseDependencyTest): + @pytest.fixture(scope="class") + def models(self): + return { + "actual.sql": models__hooks_actual, + "expected.sql": models__hooks_expected, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + # these hooks should run first, so nothing to drop + return { + "on-run-start": [ + "drop table if exists {{ var('test_create_table') }}", + "drop table if exists {{ var('test_create_second_table') }}", + ] + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [{"local": "early_hook_dependency"}, {"local": "late_hook_dependency"}] + } + + @pytest.fixture(scope="class") + def prepare_dependencies(self, project): + shutil.copytree( + project.test_dir / Path("early_hook_dependency"), + project.project_root / Path("early_hook_dependency"), + ) + shutil.copytree( + project.test_dir / Path("late_hook_dependency"), + project.project_root / Path("late_hook_dependency"), + ) + + def test_hook_dependency(self, prepare_dependencies, project): + cli_vars = json.dumps( + { + "test_create_table": '"{}"."hook_test"'.format(project.test_schema), + "test_create_second_table": '"{}"."hook_test_2"'.format(project.test_schema), + } + ) + + run_dbt(["deps", "--vars", cli_vars]) + results = run_dbt(["run", "--vars", cli_vars]) + assert len(results) == 2 + check_relations_equal(project.adapter, ["actual", "expected"]) + + +class TestSimpleDependencyDuplicateName(BaseDependencyTest): + @pytest.fixture(scope="class", autouse=True) + def setUp(self): + pass # do not copy local dependency automatically + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "duplicate_dependency"}]} + + @pytest.fixture(scope="class") + def prepare_dependencies(self, project): + shutil.copytree( + project.test_dir / Path("duplicate_dependency"), + project.project_root / Path("duplicate_dependency"), + ) + + def test_local_dependency_same_name(self, prepare_dependencies, project): + with pytest.raises(DependencyError): + run_dbt(["deps"], expect_pass=False) + + def test_local_dependency_same_name_sneaky(self, prepare_dependencies, project): + shutil.copytree("duplicate_dependency", "./dbt_packages/duplicate_dependency") + with pytest.raises(CompilationError): + run_dbt(["compile"]) + + # needed to avoid compilation errors from duplicate package names in test autocleanup + run_dbt(["clean"]) diff --git a/tests/functional/dependencies/test_simple_dependency.py b/tests/functional/dependencies/test_simple_dependency.py new file mode 100644 index 000000000..f35d902d6 --- /dev/null +++ b/tests/functional/dependencies/test_simple_dependency.py @@ -0,0 +1,435 @@ +import os +from pathlib import Path +import tempfile + +from dbt.exceptions import DbtProjectError +from dbt.tests.util import ( + check_relations_equal, + run_dbt, + write_config_file, +) +import pytest + + +models__disabled_one = """ +{{config(enabled=False)}} + +select 1 +""" + +models__disabled_two = """ +{{config(enabled=False)}} + +select * from {{ref('disabled_one')}} +""" + +models__empty = """ +""" + +models__view_summary = """ +{{ + config( + materialized='view' + ) +}} + + +with t as ( + + select * from {{ ref('view_model') }} + +) + +select date_trunc('year', updated_at) as year, + count(*) +from t +group by 1 +""" + + +class SimpleDependencyBase(object): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(project.test_data_dir / Path("seed.sql")) + + @pytest.fixture(scope="class") + def models(self): + return { + "empty.sql": models__empty, + "view_summary.sql": models__view_summary, + "view_summary.sql": models__view_summary, + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "1.1", + } + ] + } + + # These two functions included to enable override in ...NoProfile derived test class + @pytest.fixture(scope="class") + def run_deps(self, project): + return run_dbt(["deps"]) + + @pytest.fixture(scope="function") + def run_clean(self, project): + yield + + # clear test schema + assert os.path.exists("target") + run_dbt(["clean"]) + assert not os.path.exists("target") + + +class TestSimpleDependency(SimpleDependencyBase): + def test_simple_dependency(self, run_deps, project, run_clean): + """dependencies should draw from a changing base table""" + results = run_dbt() + assert len(results) == 4 + + check_relations_equal(project.adapter, ["seed", "table_model"]) + check_relations_equal(project.adapter, ["seed", "view_model"]) + check_relations_equal(project.adapter, ["seed", "incremental"]) + check_relations_equal(project.adapter, ["seed_summary", "view_summary"]) + + project.run_sql_file(project.test_data_dir / Path("update.sql")) + results = run_dbt() + assert len(results) == 4 + + check_relations_equal(project.adapter, ["seed", "table_model"]) + check_relations_equal(project.adapter, ["seed", "view_model"]) + check_relations_equal(project.adapter, ["seed", "incremental"]) + + +class TestSimpleDependencyWithDependenciesFile(SimpleDependencyBase): + @pytest.fixture(scope="class") + def packages(self): + return {} + + @pytest.fixture(scope="class") + def dependencies(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "warn-unpinned": True, + } + ] + } + + def test_dependency_with_dependencies_file(self, run_deps, project): + # Tests that "packages" defined in a dependencies.yml file works + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 4 + + +class TestSimpleDependencyWithEmptyPackagesFile(SimpleDependencyBase): + @pytest.fixture(scope="class") + def packages(self): + return " " + + def test_dependency_with_empty_packages_file(self, run_deps, project): + # Tests that an empty packages file doesn't fail with a Python error + run_dbt(["deps"]) + + +class TestSimpleDependencyNoProfile(SimpleDependencyBase): + """dbt deps and clean commands should not require a profile.""" + + @pytest.fixture(scope="class") + def run_deps(self, project): + with tempfile.TemporaryDirectory() as tmpdir: + result = run_dbt(["deps", "--profiles-dir", tmpdir]) + return result + + @pytest.fixture(scope="class") + def run_clean(self, project): + with tempfile.TemporaryDirectory() as tmpdir: + result = run_dbt(["clean", "--profiles-dir", tmpdir]) + return result + + def test_simple_dependency_no_profile(self, project, run_deps, run_clean): + """only need fixtures as opposed to any model assertions since those are + irrelevant and won't occur within the same runtime as a dbt run -s ...""" + pass + + +class TestSimpleDependencyWithModels(SimpleDependencyBase): + def test_simple_dependency_with_models(self, run_deps, project, run_clean): + results = run_dbt(["run", "--models", "view_model+"]) + len(results) == 2 + + check_relations_equal(project.adapter, ["seed", "view_model"]) + check_relations_equal(project.adapter, ["seed_summary", "view_summary"]) + + created_models = project.get_tables_in_schema() + + assert "table_model" not in created_models + assert "incremental" not in created_models + assert created_models["view_model"] == "view" + assert created_models["view_summary"] == "view" + + +class TestSimpleDependencyUnpinned(object): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(project.test_data_dir / Path("seed.sql")) + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "warn-unpinned": True, + } + ] + } + + def test_simple_dependency(self, project): + run_dbt(["deps"]) + + +class TestSimpleDependencyWithDuplicates(object): + # dbt should convert these into a single dependency internally + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "dbt/1.0.0", + }, + { + "git": "https://github.com/dbt-labs/dbt-integration-project.git", + "revision": "dbt/1.0.0", + }, + ] + } + + def test_simple_dependency_deps(self, project): + run_dbt(["deps"]) + + +class TestSimpleDependencyWithSubdirs(object): + # dbt should convert these into a single dependency internally + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-multipe-packages.git", + "subdirectory": "dbt-utils-main", + "revision": "v0.1.0", + }, + { + "git": "https://github.com/dbt-labs/dbt-multipe-packages.git", + "subdirectory": "dbt-date-main", + "revision": "v0.1.0", + }, + ] + } + + def test_git_with_multiple_subdir(self, project): + run_dbt(["deps"]) + assert os.path.exists("package-lock.yml") + expected = """packages: +- git: https://github.com/dbt-labs/dbt-multipe-packages.git + revision: 53782f3ede8fdf307ee1d8e418aa65733a4b72fa + subdirectory: dbt-utils-main +- git: https://github.com/dbt-labs/dbt-multipe-packages.git + revision: 53782f3ede8fdf307ee1d8e418aa65733a4b72fa + subdirectory: dbt-date-main +sha1_hash: b9c8042f29446c55a33f9f211737f445a640c7a1 +""" + with open("package-lock.yml") as fp: + contents = fp.read() + assert contents == expected + assert len(os.listdir("dbt_packages")) == 2 + + +class TestRekeyedDependencyWithSubduplicates(object): + # this revision of dbt-integration-project requires dbt-utils.git@0.5.0, which the + # package config handling should detect + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "config-1.0.0-deps", + }, + { + "git": "https://github.com/dbt-labs/dbt-utils", + "revision": "0.5.0", + }, + ] + } + + def test_simple_dependency_deps(self, project): + run_dbt(["deps"]) + assert len(os.listdir("dbt_packages")) == 2 + + +class TestTarballNestedDependencies(object): + # this version of dbt_expectations has a dependency on dbt_date, which the + # package config handling should detect + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "tarball": "https://github.com/calogica/dbt-expectations/archive/refs/tags/0.9.0.tar.gz", + "name": "dbt_expectations", + }, + ] + } + + def test_simple_dependency_deps(self, project): + run_dbt(["deps"]) + assert set(os.listdir("dbt_packages")) == set(["dbt_expectations", "dbt_date"]) + + +class DependencyBranchBase(object): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(project.test_data_dir / Path("seed.sql")) + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "dbt/1.0.0", + }, + ] + } + + def deps_run_assert_equality(self, project): + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 4 + + check_relations_equal(project.adapter, ["seed", "table_model"]) + check_relations_equal(project.adapter, ["seed", "view_model"]) + check_relations_equal(project.adapter, ["seed", "incremental"]) + + created_models = project.get_tables_in_schema() + + assert created_models["table_model"] == "table" + assert created_models["view_model"] == "view" + assert created_models["view_summary"] == "view" + assert created_models["incremental"] == "table" + + +class TestSimpleDependencyBranch(DependencyBranchBase): + @pytest.fixture(scope="class") + def models(self): + return { + "view_summary.sql": models__view_summary, + } + + def test_simple_dependency(self, project): + self.deps_run_assert_equality(project) + check_relations_equal(project.adapter, ["seed_summary", "view_summary"]) + + project.run_sql_file(project.test_data_dir / Path("update.sql")) + self.deps_run_assert_equality(project) + + +class TestSimpleDependencyBranchWithEmpty(DependencyBranchBase): + @pytest.fixture(scope="class") + def models(self): + """extra models included""" + return { + "disabled_one.sql": models__disabled_one, + "disabled_two.sql": models__disabled_two, + "view_summary.sql": models__view_summary, + "empty.sql": models__empty, + } + + def test_empty_models_not_compiled_in_dependencies(self, project): + self.deps_run_assert_equality(project) + + models = project.get_tables_in_schema() + + assert "empty" not in models.keys() + + +class TestSimpleDependencyBadProfile(object): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "models": { + "+any_config": "{{ target.name }}", + "+enabled": "{{ target.name in ['redshift', 'postgres'] | as_bool }}", + }, + } + + # Write out the profile data as a yaml file + @pytest.fixture(scope="class", autouse=True) + def dbt_profile_target(self): + # Need to set the environment variable here initially because + # the unittest setup does a load_config. + os.environ["PROFILE_TEST_HOST"] = "localhost" + return { + "type": "postgres", + "threads": 4, + "host": "{{ env_var('PROFILE_TEST_HOST') }}", + "port": 5432, + "user": "root", + "pass": "password", + "dbname": "dbt", + } + + def test_deps_bad_profile(self, project): + del os.environ["PROFILE_TEST_HOST"] + run_dbt(["deps"]) + run_dbt(["clean"]) + + +class TestSimpleDependcyTarball(object): + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "tarball": "https://codeload.github.com/dbt-labs/dbt-utils/tar.gz/0.9.6", + "name": "dbt_utils", + } + ] + } + + def test_deps_simple_tarball_doesnt_error_out(self, project): + run_dbt(["deps"]) + assert len(os.listdir("dbt_packages")) == 1 + + +class TestBadTarballDependency(object): + def test_malformed_tarball_package_causes_exception(self, project): + # We have to specify the bad formatted package here because if we do it + # in a `packages` fixture, the test will blow up in the setup phase, meaning + # we can't appropriately catch it with a `pytest.raises` + bad_tarball_package_spec = { + "packages": [ + { + "tarball": "https://codeload.github.com/dbt-labs/dbt-utils/tar.gz/0.9.6", + "version": "dbt_utils", + } + ] + } + write_config_file(bad_tarball_package_spec, "packages.yml") + + with pytest.raises( + DbtProjectError, match=r"The packages.yml file in this project is malformed" + ) as e: + run_dbt(["deps"]) + assert e is not None diff --git a/tests/functional/dependencies/test_simple_dependency_with_configs.py b/tests/functional/dependencies/test_simple_dependency_with_configs.py new file mode 100644 index 000000000..55ecff9ad --- /dev/null +++ b/tests/functional/dependencies/test_simple_dependency_with_configs.py @@ -0,0 +1,106 @@ +from pathlib import Path + +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + + +models__view_summary = """ +{{ + config( + materialized='view' + ) +}} + + +with t as ( + + select * from {{ ref('view_model') }} + +) + +select date_trunc('year', updated_at) as year, + count(*) +from t +group by 1 +""" + + +class BaseTestSimpleDependencyWithConfigs(object): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(project.test_data_dir / Path("seed.sql")) + + @pytest.fixture(scope="class") + def models(self): + return { + "view_summary.sql": models__view_summary, + } + + +class TestSimpleDependencyWithConfigs(BaseTestSimpleDependencyWithConfigs): + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "with-configs-1.0.0", + }, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "vars": { + "dbt_integration_project": {"bool_config": True}, + }, + } + + def test_simple_dependency(self, project): + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 5 + + check_relations_equal(project.adapter, ["seed_config_expected_1", "config"]) + check_relations_equal(project.adapter, ["seed", "table_model"]) + check_relations_equal(project.adapter, ["seed", "view_model"]) + check_relations_equal(project.adapter, ["seed", "incremental"]) + + +class TestSimpleDependencyWithOverriddenConfigs(BaseTestSimpleDependencyWithConfigs): + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "with-configs-1.0.0", + }, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "vars": { + # project-level configs + "dbt_integration_project": { + "config_1": "abc", + "config_2": "def", + "bool_config": True, + }, + }, + } + + def test_simple_dependency(self, project): + run_dbt(["deps"]) + results = run_dbt(["run"]) + len(results) == 5 + + check_relations_equal(project.adapter, ["seed_config_expected_2", "config"]) + check_relations_equal(project.adapter, ["seed", "table_model"]) + check_relations_equal(project.adapter, ["seed", "view_model"]) + check_relations_equal(project.adapter, ["seed", "incremental"]) diff --git a/tests/functional/deprecations/fixtures.py b/tests/functional/deprecations/fixtures.py new file mode 100644 index 000000000..0028f206e --- /dev/null +++ b/tests/functional/deprecations/fixtures.py @@ -0,0 +1,101 @@ +models__already_exists_sql = """ +select 1 as id + +{% if adapter.already_exists(this.schema, this.identifier) and not should_full_refresh() %} + where id > (select max(id) from {{this}}) +{% endif %} +""" + +models_trivial__model_sql = """ +select 1 as id +""" + + +bad_name_yaml = """ +version: 2 + +exposures: + - name: simple exposure spaced!! + type: dashboard + depends_on: + - ref('model') + owner: + email: something@example.com +""" + +# deprecated test config fixtures +data_tests_yaml = """ +models: + - name: model + columns: + - name: id + data_tests: + - not_null +""" + +test_type_mixed_yaml = """ +models: + - name: model + columns: + - name: id + data_tests: + - not_null + tests: + - unique +""" + +old_tests_yaml = """ +models: + - name: model + columns: + - name: id + tests: + - not_null +""" + +sources_old_tests_yaml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + tests: + - unique +""" + +seed_csv = """id,name +1,Mary +2,Sam +3,John +""" + + +local_dependency__dbt_project_yml = """ + +name: 'local_dep' +version: '1.0' + +seeds: + quote_columns: False + +""" + +local_dependency__schema_yml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + tests: + - unique +""" + +local_dependency__seed_csv = """id,name +1,Mary +2,Sam +3,John +""" diff --git a/tests/functional/deprecations/model_deprecations.py b/tests/functional/deprecations/model_deprecations.py new file mode 100644 index 000000000..c762e7a65 --- /dev/null +++ b/tests/functional/deprecations/model_deprecations.py @@ -0,0 +1,106 @@ +from dbt.cli.main import dbtRunner +from dbt.tests.util import run_dbt +from dbt_common.exceptions import EventCompilationError +import pytest + + +deprecated_model__yml = """ +version: 2 + +models: + - name: my_model + description: deprecated + deprecation_date: 1999-01-01 +""" + +deprecating_model__yml = """ +version: 2 + +models: + - name: my_model + description: deprecating in the future + deprecation_date: 2999-01-01 +""" + +model__sql = """ +select 1 as Id +""" + +dependant_model__sql = """ +select * from {{ ref("my_model") }} +""" + + +class TestModelDeprecationWarning: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": model__sql, "my_schema.yml": deprecated_model__yml} + + def test_deprecation_warning(self, project): + events = [] + dbtRunner(callbacks=[events.append]).invoke(["parse"]) + matches = list([e for e in events if e.info.name == "DeprecatedModel"]) + assert len(matches) == 1 + assert matches[0].read_data.model_name == "my_model" + + def test_deprecation_warning_error(self, project): + with pytest.raises(EventCompilationError): + run_dbt(["--warn-error", "parse"]) + + def test_deprecation_warning_error_options(self, project): + with pytest.raises(EventCompilationError): + run_dbt(["--warn-error-options", '{"include": ["DeprecatedModel"]}', "parse"]) + + +class TestUpcomingReferenceDeprecatingWarning: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": model__sql, + "my_dependant_model.sql": dependant_model__sql, + "my_schema.yml": deprecating_model__yml, + } + + def test_deprecation_warning(self, project): + events = [] + dbtRunner(callbacks=[events.append]).invoke(["parse"]) + matches = list([e for e in events if e.info.name == "UpcomingReferenceDeprecation"]) + assert len(matches) == 1 + assert matches[0].read_data.model_name == "my_dependant_model" + assert matches[0].read_data.ref_model_name == "my_model" + + def test_deprecation_warning_error(self, project): + with pytest.raises(EventCompilationError): + run_dbt(["--warn-error", "parse"]) + + def test_deprecation_warning_error_options(self, project): + with pytest.raises(EventCompilationError): + run_dbt( + ["--warn-error-options", '{"include": ["UpcomingReferenceDeprecation"]}', "parse"] + ) + + +class TestDeprecatedReferenceWarning: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": model__sql, + "my_dependant_model.sql": dependant_model__sql, + "my_schema.yml": deprecated_model__yml, + } + + def test_deprecation_warning(self, project): + events = [] + dbtRunner(callbacks=[events.append]).invoke(["parse"]) + matches = list([e for e in events if e.info.name == "DeprecatedReference"]) + assert len(matches) == 1 + assert matches[0].read_data.model_name == "my_dependant_model" + assert matches[0].read_data.ref_model_name == "my_model" + + def test_deprecation_warning_error(self, project): + with pytest.raises(EventCompilationError): + run_dbt(["--warn-error", "parse"]) + + def test_deprecation_warning_error_options(self, project): + with pytest.raises(EventCompilationError): + run_dbt(["--warn-error-options", '{"include": ["DeprecatedReference"]}', "parse"]) diff --git a/tests/functional/deprecations/test_config_deprecations.py b/tests/functional/deprecations/test_config_deprecations.py new file mode 100644 index 000000000..218c795f3 --- /dev/null +++ b/tests/functional/deprecations/test_config_deprecations.py @@ -0,0 +1,148 @@ +from dbt.deprecations import active_deprecations, reset_deprecations +from dbt.exceptions import ProjectContractError, YamlParseDictError +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt, update_config_file +from dbt_common.exceptions import CompilationError +import pytest + +import fixtures + + +# test deprecation messages +class TestTestsConfigDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": fixtures.models_trivial__model_sql} + + @pytest.fixture(scope="class") + def project_config_update(self, unique_schema): + return {"tests": {"enabled": "true"}} + + def test_tests_config(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["parse"]) + expected = {"project-test-config"} + assert expected == active_deprecations + + def test_tests_config_fail(self, project): + reset_deprecations() + assert active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "--no-partial-parse", "parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "The `tests` config has been renamed to `data_tests`" + assert expected_msg in exc_str + + +class TestSchemaTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_trivial__model_sql, + "schema.yml": fixtures.old_tests_yaml, + } + + def test_tests_config(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["parse"]) + expected = {"project-test-config"} + assert expected == active_deprecations + + def test_schema_tests_fail(self, project): + reset_deprecations() + assert active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "--no-partial-parse", "parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "The `tests` config has been renamed to `data_tests`" + assert expected_msg in exc_str + + +class TestSourceSchemaTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": fixtures.sources_old_tests_yaml} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": fixtures.seed_csv} + + def test_source_tests_config(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["seed"]) + run_dbt(["parse"]) + expected = {"project-test-config"} + assert expected == active_deprecations + + def test_schema_tests(self, project): + run_dbt(["seed"]) + results = run_dbt(["test"]) + assert len(results) == 1 + + +# test for failure with test and data_tests in the same file +class TestBothSchemaTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_trivial__model_sql, + "schema.yml": fixtures.test_type_mixed_yaml, + } + + def test_schema(self, project): + expected_msg = "Invalid test config: cannot have both 'tests' and 'data_tests' defined" + with pytest.raises(YamlParseDictError) as excinfo: + run_dbt(["parse"]) + assert expected_msg in str(excinfo.value) + + +# test for failure with test and data_tests in the same dbt_project.yml +class TestBothProjectTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": fixtures.models_trivial__model_sql} + + def test_tests_config(self, project): + config_patch = {"tests": {"+enabled": "true"}, "data_tests": {"+tags": "super"}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + + expected_msg = "Invalid project config: cannot have both 'tests' and 'data_tests' defined" + with pytest.raises(ProjectContractError) as excinfo: + run_dbt(["parse"]) + assert expected_msg in str(excinfo.value) + + +# test a local dependency can have tests while the rest of the project uses data_tests +class TestTestConfigInDependency: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": fixtures.local_dependency__dbt_project_yml, + "models": { + "schema.yml": fixtures.local_dependency__schema_yml, + }, + "seeds": {"seed.csv": fixtures.local_dependency__seed_csv}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_trivial__model_sql, + "schema.yml": fixtures.data_tests_yaml, + } + + def test_test_dep(self, project): + run_dbt(["deps"]) + run_dbt(["seed"]) + run_dbt(["run"]) + results = run_dbt(["test"]) + # 1 data_test in the dep and 1 in the project + assert len(results) == 2 diff --git a/tests/functional/deprecations/test_deprecations.py b/tests/functional/deprecations/test_deprecations.py new file mode 100644 index 000000000..185157a0c --- /dev/null +++ b/tests/functional/deprecations/test_deprecations.py @@ -0,0 +1,148 @@ +from dbt.deprecations import active_deprecations, reset_deprecations +from dbt.tests.util import run_dbt, write_file +from dbt_common.exceptions import CompilationError +import pytest +import yaml + +import fixtures + + +class TestConfigPathDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"already_exists.sql": fixtures.models_trivial__model_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "data-paths": ["data"], + "log-path": "customlogs", + "target-path": "customtarget", + } + + def test_data_path(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["debug"]) + expected = { + "project-config-data-paths", + "project-config-log-path", + "project-config-target-path", + } + assert expected == active_deprecations + + def test_data_path_fail(self, project): + reset_deprecations() + assert active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "debug"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "The `data-paths` config has been renamed" + assert expected_msg in exc_str + + +class TestPackageInstallPathDeprecation: + @pytest.fixture(scope="class") + def models_trivial(self): + return {"model.sql": fixtures.models_trivial__model_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"config-version": 2, "clean-targets": ["dbt_modules"]} + + def test_package_path(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["clean"]) + expected = {"install-packages-path"} + assert expected == active_deprecations + + def test_package_path_not_set(self, project): + reset_deprecations() + assert active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "clean"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "path has changed from `dbt_modules` to `dbt_packages`." + assert expected_msg in exc_str + + +class TestPackageRedirectDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"already_exists.sql": fixtures.models_trivial__model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"package": "fishtown-analytics/dbt_utils", "version": "0.7.0"}]} + + def test_package_redirect(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["deps"]) + expected = {"package-redirect"} + assert expected == active_deprecations + + # if this test comes before test_package_redirect it will raise an exception as expected + def test_package_redirect_fail(self, project): + reset_deprecations() + assert active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "deps"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "The `fishtown-analytics/dbt_utils` package is deprecated in favor of `dbt-labs/dbt_utils`" + assert expected_msg in exc_str + + +class TestExposureNameDeprecation: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_trivial__model_sql, + "bad_name.yml": fixtures.bad_name_yaml, + } + + def test_exposure_name(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["parse"]) + expected = {"exposure-name"} + assert expected == active_deprecations + + def test_exposure_name_fail(self, project): + reset_deprecations() + assert active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "--no-partial-parse", "parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "Starting in v1.3, the 'name' of an exposure should contain only letters, numbers, and underscores." + assert expected_msg in exc_str + + +class TestPrjectFlagsMovedDeprecation: + @pytest.fixture(scope="class") + def profiles_config_update(self): + return { + "config": {"send_anonymous_usage_stats": False}, + } + + @pytest.fixture(scope="class") + def dbt_project_yml(self, project_root, project_config_update): + project_config = { + "name": "test", + "profile": "test", + } + write_file(yaml.safe_dump(project_config), project_root, "dbt_project.yml") + return project_config + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": "select 1 as fun"} + + def test_profile_config_deprecation(self, project): + reset_deprecations() + assert active_deprecations == set() + run_dbt(["parse"]) + expected = {"project-flags-moved"} + assert expected == active_deprecations diff --git a/tests/functional/docs/test_duplicate_docs_block.py b/tests/functional/docs/test_duplicate_docs_block.py new file mode 100644 index 000000000..393980e27 --- /dev/null +++ b/tests/functional/docs/test_duplicate_docs_block.py @@ -0,0 +1,34 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +duplicate_doc_blocks_model_sql = "select 1 as id, 'joe' as first_name" + +duplicate_doc_blocks_docs_md = """{% docs my_model_doc %} + a doc string +{% enddocs %} + +{% docs my_model_doc %} + duplicate doc string +{% enddocs %}""" + +duplicate_doc_blocks_schema_yml = """version: 2 + +models: + - name: model + description: "{{ doc('my_model_doc') }}" +""" + + +class TestDuplicateDocsBlock: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": duplicate_doc_blocks_model_sql, + "schema.yml": duplicate_doc_blocks_schema_yml, + } + + def test_duplicate_doc_ref(self, project): + with pytest.raises(CompilationError): + run_dbt(expect_pass=False) diff --git a/tests/functional/docs/test_generate.py b/tests/functional/docs/test_generate.py new file mode 100644 index 000000000..1da96f5ba --- /dev/null +++ b/tests/functional/docs/test_generate.py @@ -0,0 +1,100 @@ +from dbt.tests.util import get_manifest, run_dbt +import pytest + + +sample_seed = """sample_num,sample_bool +1,true +2,false +3,true +""" + +second_seed = """sample_num,sample_bool +4,true +5,false +6,true +""" + +sample_config = """ +sources: + - name: my_seed + schema: "{{ target.schema }}" + tables: + - name: sample_seed + - name: second_seed + - name: fake_seed +""" + + +class TestBaseGenerate: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": "select 1 as fun", + "alt_model.sql": "select 1 as notfun", + "sample_config.yml": sample_config, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "sample_seed.csv": sample_seed, + "second_seed.csv": sample_seed, + } + + +class TestGenerateManifestNotCompiled(TestBaseGenerate): + def test_manifest_not_compiled(self, project): + run_dbt(["docs", "generate", "--no-compile"]) + # manifest.json is written out in parsing now, but it + # shouldn't be compiled because of the --no-compile flag + manifest = get_manifest(project.project_root) + model_id = "model.test.my_model" + assert model_id in manifest.nodes + assert manifest.nodes[model_id].compiled is False + + +class TestGenerateEmptyCatalog(TestBaseGenerate): + def test_generate_empty_catalog(self, project): + catalog = run_dbt(["docs", "generate", "--empty-catalog"]) + assert catalog.nodes == {}, "nodes should be empty" + assert catalog.sources == {}, "sources should be empty" + assert catalog.errors is None, "errors should be null" + + +class TestGenerateSelectLimitsCatalog(TestBaseGenerate): + def test_select_limits_catalog(self, project): + run_dbt(["run"]) + catalog = run_dbt(["docs", "generate", "--select", "my_model"]) + assert len(catalog.nodes) == 1 + assert "model.test.my_model" in catalog.nodes + + +class TestGenerateSelectLimitsNoMatch(TestBaseGenerate): + def test_select_limits_no_match(self, project): + run_dbt(["run"]) + catalog = run_dbt(["docs", "generate", "--select", "my_missing_model"]) + assert len(catalog.nodes) == 0 + + +class TestGenerateCatalogWithSources(TestBaseGenerate): + def test_catalog_with_sources(self, project): + run_dbt(["build"]) + catalog = run_dbt(["docs", "generate"]) + + # 2 seeds + 2 models + assert len(catalog.nodes) == 4 + # 2 sources (only ones that exist) + assert len(catalog.sources) == 2 + + +class TestGenerateSelectSource(TestBaseGenerate): + def test_select_source(self, project): + run_dbt(["build"]) + catalog = run_dbt(["docs", "generate", "--select", "source:test.my_seed.sample_seed"]) + + # 2 seeds + # TODO: Filtering doesn't work for seeds + assert len(catalog.nodes) == 2 + # 2 sources + # TODO: Filtering doesn't work for sources + assert len(catalog.sources) == 2 diff --git a/tests/functional/docs/test_good_docs_blocks.py b/tests/functional/docs/test_good_docs_blocks.py new file mode 100644 index 000000000..d1ab0f5a1 --- /dev/null +++ b/tests/functional/docs/test_good_docs_blocks.py @@ -0,0 +1,177 @@ +import json +import os +from pathlib import Path + +from dbt.tests.util import run_dbt, update_config_file, write_file +import pytest + + +good_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" + +good_docs_blocks_docs_md = """{% docs my_model_doc %} +My model is just a copy of the seed +{% enddocs %} + +{% docs my_model_doc__id %} +The user ID number +{% enddocs %} + +The following doc is never used, which should be fine. +{% docs my_model_doc__first_name %} +The user's first name (should not be shown!) +{% enddocs %} + +This doc is referenced by its full name +{% docs my_model_doc__last_name %} +The user's last name +{% enddocs %} +""" + +good_doc_blocks_alt_docs_md = """{% docs my_model_doc %} +Alt text about the model +{% enddocs %} + +{% docs my_model_doc__id %} +The user ID number with alternative text +{% enddocs %} + +The following doc is never used, which should be fine. +{% docs my_model_doc__first_name %} +The user's first name - don't show this text! +{% enddocs %} + +This doc is referenced by its full name +{% docs my_model_doc__last_name %} +The user's last name in this other file +{% enddocs %} +""" + +good_docs_blocks_schema_yml = """version: 2 + +models: + - name: model + description: "{{ doc('my_model_doc') }}" + columns: + - name: id + description: "{{ doc('my_model_doc__id') }}" + - name: first_name + description: The user's first name + - name: last_name + description: "{{ doc('test', 'my_model_doc__last_name') }}" +""" + + +class TestGoodDocsBlocks: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": good_docs_blocks_model_sql, + "schema.yml": good_docs_blocks_schema_yml, + "docs.md": good_docs_blocks_docs_md, + } + + def test_valid_doc_ref(self, project): + result = run_dbt() + assert len(result.results) == 1 + + assert os.path.exists("./target/manifest.json") + + with open("./target/manifest.json") as fp: + manifest = json.load(fp) + + model_data = manifest["nodes"]["model.test.model"] + + assert model_data["description"] == "My model is just a copy of the seed" + + assert { + "name": "id", + "description": "The user ID number", + "data_type": None, + "constraints": [], + "meta": {}, + "quote": None, + "tags": [], + } == model_data["columns"]["id"] + + assert { + "name": "first_name", + "description": "The user's first name", + "data_type": None, + "constraints": [], + "meta": {}, + "quote": None, + "tags": [], + } == model_data["columns"]["first_name"] + + assert { + "name": "last_name", + "description": "The user's last name", + "data_type": None, + "constraints": [], + "meta": {}, + "quote": None, + "tags": [], + } == model_data["columns"]["last_name"] + + assert len(model_data["columns"]) == 3 + + +class TestGoodDocsBlocksAltPath: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": good_docs_blocks_model_sql, "schema.yml": good_docs_blocks_schema_yml} + + def test_alternative_docs_path(self, project): + # self.use_default_project({"docs-paths": [self.dir("docs")]}) + docs_path = Path(project.project_root, "alt-docs") + docs_path.mkdir() + write_file(good_doc_blocks_alt_docs_md, project.project_root, "alt-docs", "docs.md") + + update_config_file( + {"docs-paths": [str(docs_path)]}, project.project_root, "dbt_project.yml" + ) + + result = run_dbt() + + assert len(result.results) == 1 + + assert os.path.exists("./target/manifest.json") + + with open("./target/manifest.json") as fp: + manifest = json.load(fp) + + model_data = manifest["nodes"]["model.test.model"] + + assert model_data["description"] == "Alt text about the model" + + assert { + "name": "id", + "description": "The user ID number with alternative text", + "data_type": None, + "constraints": [], + "meta": {}, + "quote": None, + "tags": [], + } == model_data["columns"]["id"] + + assert { + "name": "first_name", + "description": "The user's first name", + "data_type": None, + "constraints": [], + "meta": {}, + "quote": None, + "tags": [], + } == model_data["columns"]["first_name"] + + assert { + "name": "last_name", + "description": "The user's last name in this other file", + "data_type": None, + "constraints": [], + "meta": {}, + "quote": None, + "tags": [], + } == model_data["columns"]["last_name"] + + assert len(model_data["columns"]) == 3 diff --git a/tests/functional/docs/test_invalid_doc_ref.py b/tests/functional/docs/test_invalid_doc_ref.py new file mode 100644 index 000000000..eda72d64f --- /dev/null +++ b/tests/functional/docs/test_invalid_doc_ref.py @@ -0,0 +1,46 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +invalid_doc_ref_model_sql = "select 1 as id, 'joe' as first_name" + +invalid_doc_ref_docs_md = """{% docs my_model_doc %} +My model is just a copy of the seed +{% enddocs %} + +{% docs my_model_doc__id %} +The user ID number +{% enddocs %} + +The following doc is never used, which should be fine. +{% docs my_model_doc__first_name %} +The user's first name +{% enddocs %}""" + +invalid_doc_ref_schema_yml = """version: 2 + +models: + - name: model + description: "{{ doc('my_model_doc') }}" + columns: + - name: id + description: "{{ doc('my_model_doc__id') }}" + - name: first_name + description: "{{ doc('foo.bar.my_model_doc__id') }}" +""" + + +class TestInvalidDocRef: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": invalid_doc_ref_model_sql, + "docs.md": invalid_doc_ref_docs_md, + "schema.yml": invalid_doc_ref_schema_yml, + } + + def test_invalid_doc_ref(self, project): + # The run should fail since we could not find the docs reference. + with pytest.raises(CompilationError): + run_dbt(expect_pass=False) diff --git a/tests/functional/docs/test_missing_docs_blocks.py b/tests/functional/docs/test_missing_docs_blocks.py new file mode 100644 index 000000000..1aa231681 --- /dev/null +++ b/tests/functional/docs/test_missing_docs_blocks.py @@ -0,0 +1,42 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +missing_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" + +missing_docs_blocks_docs_md = """{% docs my_model_doc %} +My model is just a copy of the seed +{% enddocs %} + +{% docs my_model_doc__id %} +The user ID number +{% enddocs %}""" + +missing_docs_blocks_schema_yml = """version: 2 + +models: + - name: model + description: "{{ doc('my_model_doc') }}" + columns: + - name: id + description: "{{ doc('my_model_doc__id') }}" + - name: first_name + # invalid reference + description: "{{ doc('my_model_doc__first_name') }}" +""" + + +class TestMissingDocsBlocks: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": missing_docs_blocks_model_sql, + "schema.yml": missing_docs_blocks_schema_yml, + "docs.md": missing_docs_blocks_docs_md, + } + + def test_missing_doc_ref(self, project): + # The run should fail since we could not find the docs reference. + with pytest.raises(CompilationError): + run_dbt() diff --git a/tests/functional/docs/test_model_version_docs_blocks.py b/tests/functional/docs/test_model_version_docs_blocks.py new file mode 100644 index 000000000..be6100810 --- /dev/null +++ b/tests/functional/docs/test_model_version_docs_blocks.py @@ -0,0 +1,74 @@ +from dbt.tests.util import run_dbt +import pytest + + +model_1 = """ +select 1 as id, 'joe' as first_name +""" + +model_versioned = """ +select 1 as id, 'joe' as first_name +""" + +docs_md = """ +{% docs model_description %} +unversioned model +{% enddocs %} + +{% docs column_id_doc %} +column id for some thing +{% enddocs %} + +{% docs versioned_model_description %} +versioned model +{% enddocs %} + +""" + +schema_yml = """ +models: + - name: model_1 + description: '{{ doc("model_description") }}' + columns: + - name: id + description: '{{ doc("column_id_doc") }}' + + - name: model_versioned + description: '{{ doc("versioned_model_description") }}' + latest_version: 1 + versions: + - v: 1 + config: + alias: my_alias + columns: + - name: id + description: '{{ doc("column_id_doc") }}' + - name: first_name + description: 'plain text' + - v: 2 + columns: + - name: other_id +""" + + +class TestVersionedModelDocsBlock: + @pytest.fixture(scope="class") + def models(self): + return { + "model_1.sql": model_1, + "model_versioned.sql": model_versioned, + "schema.yml": schema_yml, + "docs.md": docs_md, + } + + def test_versioned_doc_ref(self, project): + manifest = run_dbt(["parse"]) + model_1 = manifest.nodes["model.test.model_1"] + model_v1 = manifest.nodes["model.test.model_versioned.v1"] + + assert model_1.description == "unversioned model" + assert model_v1.description == "versioned model" + + assert model_1.columns["id"].description == "column id for some thing" + assert model_v1.columns["id"].description == "column id for some thing" + assert model_v1.columns["first_name"].description == "plain text" diff --git a/tests/functional/docs/test_static.py b/tests/functional/docs/test_static.py new file mode 100644 index 000000000..589668bd9 --- /dev/null +++ b/tests/functional/docs/test_static.py @@ -0,0 +1,50 @@ +import os + +from dbt.task.docs import DOCS_INDEX_FILE_PATH +from dbt.tests.util import run_dbt +from dbt_common.clients.system import load_file_contents +import pytest + + +class TestStaticGenerate: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": "select 1 as fun"} + + def test_static_generated(self, project): + run_dbt(["docs", "generate", "--static"]) + + source_index_html = load_file_contents(DOCS_INDEX_FILE_PATH) + + target_index_html = load_file_contents( + os.path.join(project.project_root, "target", "index.html") + ) + + # Validate index.html was copied correctly + assert len(target_index_html) == len(source_index_html) + assert hash(target_index_html) == hash(source_index_html) + + manifest_data = load_file_contents( + os.path.join(project.project_root, "target", "manifest.json") + ) + + catalog_data = load_file_contents( + os.path.join(project.project_root, "target", "catalog.json") + ) + + static_index_html = load_file_contents( + os.path.join(project.project_root, "target", "static_index.html") + ) + + # Calculate expected static_index.html + expected_static_index_html = source_index_html + expected_static_index_html = expected_static_index_html.replace( + '"MANIFEST.JSON INLINE DATA"', manifest_data + ) + expected_static_index_html = expected_static_index_html.replace( + '"CATALOG.JSON INLINE DATA"', catalog_data + ) + + # Validate static_index.html was generated correctly + assert len(expected_static_index_html) == len(static_index_html) + assert hash(expected_static_index_html) == hash(static_index_html) diff --git a/tests/functional/duplicates/test_duplicate_analysis.py b/tests/functional/duplicates/test_duplicate_analysis.py new file mode 100644 index 000000000..742320406 --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_analysis.py @@ -0,0 +1,32 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +my_model_sql = """ +select 1 as id +""" + +my_analysis_sql = """ +select * from {{ ref('my_model') }} +""" + + +class TestDuplicateAnalysis: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": my_model_sql} + + @pytest.fixture(scope="class") + def analyses(self): + return { + "anlysis-1": {"model.sql": my_analysis_sql}, + "anlysis-2": {"model.sql": my_analysis_sql}, + } + + def test_duplicate_model_enabled(self, project): + message = "dbt found two analyses with the name" + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + assert message in exc_str diff --git a/tests/functional/duplicates/test_duplicate_exposure.py b/tests/functional/duplicates/test_duplicate_exposure.py new file mode 100644 index 000000000..9ece78f91 --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_exposure.py @@ -0,0 +1,30 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +exposure_dupes_schema_yml = """ +version: 2 +exposures: + - name: something + type: dashboard + owner: + email: test@example.com + - name: something + type: dashboard + owner: + email: test@example.com + +""" + + +class TestDuplicateExposure: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": exposure_dupes_schema_yml} + + def test_duplicate_exposure(self, project): + message = "dbt found two exposures with the name" + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_macro.py b/tests/functional/duplicates/test_duplicate_macro.py new file mode 100644 index 000000000..5c910e468 --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_macro.py @@ -0,0 +1,71 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +bad_same_macros_sql = """ +{% macro some_macro() %} +{% endmacro %} + +{% macro some_macro() %} +{% endmacro %} + +""" + +bad_separate_one_sql = """ +{% macro some_macro() %} +{% endmacro %} + +""" + +bad_separate_two_sql = """ +{% macro some_macro() %} +{% endmacro %} + +""" + +model_sql = """ +select 1 as value +""" + + +class TestDuplicateMacroEnabledSameFile: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + @pytest.fixture(scope="class") + def macros(self): + return { + "macro.sql": bad_same_macros_sql, + } + + def test_duplicate_macros(self, project): + message = 'dbt found two macros named "some_macro" in the project' + with pytest.raises(CompilationError) as exc: + run_dbt(["parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + assert message in exc_str + assert "macro.sql" in exc_str + + +class TestDuplicateMacroEnabledDifferentFiles: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + @pytest.fixture(scope="class") + def macros(self): + return { + "one.sql": bad_separate_one_sql, + "two.sql": bad_separate_two_sql, + } + + def test_duplicate_macros(self, project): + message = 'dbt found two macros named "some_macro" in the project' + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + assert message in exc_str + assert "one.sql" in exc_str + assert "two.sql" in exc_str diff --git a/tests/functional/duplicates/test_duplicate_metric.py b/tests/functional/duplicates/test_duplicate_metric.py new file mode 100644 index 000000000..676edc6b5 --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_metric.py @@ -0,0 +1,40 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +metric_dupes_schema_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: "people" + meta: + my_meta: 'testing' + + - name: number_of_people + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: "years_tenure" + filter: "{{ Dimension('people_entity__loves_dbt') }} is true" +""" + + +class TestDuplicateMetric: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": metric_dupes_schema_yml} + + def test_duplicate_metric(self, project): + message = "dbt found two metrics with the name" + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_model.py b/tests/functional/duplicates/test_duplicate_model.py new file mode 100644 index 000000000..650d54d29 --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_model.py @@ -0,0 +1,263 @@ +from dbt.exceptions import AmbiguousAliasError +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_manifest, run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +disabled_model_sql = """ +{{ + config( + enabled=False, + materialized="table", + ) +}} + +select 1 + +""" + +enabled_model_sql = """ +{{ + config( + enabled=True, + materialized="table", + ) +}} + +select 1 as value + +""" + +dbt_project_yml = """ +name: 'local_dep' +version: '1.0' +config-version: 2 + +profile: 'default' + +model-paths: ["models"] + +seeds: + quote_columns: False + +""" + +local_dep_schema_yml = """ +models: + - name: table_model + config: + alias: table_model_local_dep + columns: + - name: id + data_tests: + - unique +""" + +local_dep_versions_schema_yml = """ +models: + - name: table_model + config: + alias: table_model_local_dep + versions: + - v: 1 +""" + + +class TestDuplicateModelEnabled: + @pytest.fixture(scope="class") + def models(self): + return { + "model-enabled-1": {"model.sql": enabled_model_sql}, + "model-enabled-2": {"model.sql": enabled_model_sql}, + } + + def test_duplicate_model_enabled(self, project): + message = "dbt found two models with the name" + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + assert message in exc_str + + +class TestDuplicateModelDisabled: + @pytest.fixture(scope="class") + def models(self): + return { + "model-disabled": {"model.sql": disabled_model_sql}, + "model-enabled": {"model.sql": enabled_model_sql}, + } + + def test_duplicate_model_disabled(self, project): + results = run_dbt(["compile"]) + assert len(results) == 1 + + manifest = get_manifest(project.project_root) + + model_id = "model.test.model" + assert model_id in manifest.nodes + assert model_id in manifest.disabled + + def test_duplicate_model_disabled_partial_parsing(self, project): + run_dbt(["clean"]) + results = run_dbt(["--partial-parse", "compile"]) + assert len(results) == 1 + results = run_dbt(["--partial-parse", "compile"]) + assert len(results) == 1 + results = run_dbt(["--partial-parse", "compile"]) + assert len(results) == 1 + + +class TestDuplicateModelAliasEnabledAcrossPackages: + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": enabled_model_sql} + + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": dbt_project_yml, + "models": {"table_model.sql": enabled_model_sql}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_duplicate_model_alias_enabled_across_packages(self, project): + run_dbt(["deps"]) + message = "dbt found two resources with the database representation" + with pytest.raises(AmbiguousAliasError) as exc: + run_dbt(["run"]) + assert message in str(exc.value) + + +class TestDuplicateModelDisabledAcrossPackages: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": dbt_project_yml, + "models": {"table_model.sql": enabled_model_sql}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": disabled_model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_duplicate_model_disabled_across_packages(self, project): + run_dbt(["deps"]) + results = run_dbt(["compile"]) + assert len(results) == 1 + + manifest = get_manifest(project.project_root) + local_dep_model_id = "model.local_dep.table_model" + model_id = "model.test.table_model" + assert local_dep_model_id in manifest.nodes + assert model_id in manifest.disabled + + +class TestDuplicateModelNameWithTestAcrossPackages: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": dbt_project_yml, + "models": {"table_model.sql": enabled_model_sql, "schema.yml": local_dep_schema_yml}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": enabled_model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_duplicate_model_name_with_test_across_packages(self, project): + run_dbt(["deps"]) + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 3 + + # model nodes with duplicate names exist + local_dep_model_node_id = "model.local_dep.table_model" + root_model_node_id = "model.test.table_model" + assert local_dep_model_node_id in manifest.nodes + assert root_model_node_id in manifest.nodes + + # test node exists and is attached to correct node + test_node_id = "test.local_dep.unique_table_model_id.1da9e464d9" + assert test_node_id in manifest.nodes + assert manifest.nodes[test_node_id].attached_node == local_dep_model_node_id + + +class TestDuplicateModelNameWithVersionAcrossPackages: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": dbt_project_yml, + "models": { + "table_model.sql": enabled_model_sql, + "schema.yml": local_dep_versions_schema_yml, + }, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": enabled_model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_duplicate_model_name_with_test_across_packages(self, project): + run_dbt(["deps"]) + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 2 + + # model nodes with duplicate names exist + local_dep_model_node_id = "model.local_dep.table_model.v1" + root_model_node_id = "model.test.table_model" + assert local_dep_model_node_id in manifest.nodes + assert root_model_node_id in manifest.nodes + + +class TestModelTestOverlap: + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": enabled_model_sql} + + @property + def project_config(self): + return { + "config-version": 2, + "test-paths": ["models"], + } + + def test_duplicate_test_model_paths(self, project): + # this should be ok: test/model overlap is fine + run_dbt(["compile"]) + run_dbt(["--partial-parse", "compile"]) + run_dbt(["--partial-parse", "compile"]) + + +class TestMultipleDisabledModels: + @pytest.fixture(scope="class") + def models(self): + return { + "subdir3": {"model_alt.sql": disabled_model_sql}, + "subdir2": {"model_alt.sql": disabled_model_sql}, + "subdir1": {"model_alt.sql": enabled_model_sql}, + } + + def test_multiple_disabled_models(self, project): + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.model_alt" + assert model_id in manifest.nodes diff --git a/tests/functional/duplicates/test_duplicate_resource.py b/tests/functional/duplicates/test_duplicate_resource.py new file mode 100644 index 000000000..36ab4d919 --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_resource.py @@ -0,0 +1,33 @@ +from dbt.tests.util import run_dbt +import pytest + + +models_naming_dupes_schema_yml = """ +version: 2 +models: + - name: something + description: This table has basic information about orders, as well as some derived facts based on payments +exposure: + - name: something + +""" + +something_model_sql = """ + +select 1 as item + +""" + + +class TestDuplicateSchemaResource: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_naming_dupes_schema_yml, + "something.sql": something_model_sql, + } + + # a model and an exposure can share the same name + def test_duplicate_model_and_exposure(self, project): + result = run_dbt(["compile"]) + assert len(result) == 1 diff --git a/tests/functional/duplicates/test_duplicate_source.py b/tests/functional/duplicates/test_duplicate_source.py new file mode 100644 index 000000000..adb9dee4c --- /dev/null +++ b/tests/functional/duplicates/test_duplicate_source.py @@ -0,0 +1,26 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +source_dupes_schema_yml = """ +version: 2 +sources: + - name: something + tables: + - name: dupe + - name: dupe + +""" + + +class TestDuplicateSourceEnabled: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": source_dupes_schema_yml} + + def test_duplicate_source_enabled(self, project): + message = "dbt found two sources with the name" + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + assert message in str(exc.value) diff --git a/tests/functional/exit_codes/fixtures.py b/tests/functional/exit_codes/fixtures.py new file mode 100644 index 000000000..10f5c2048 --- /dev/null +++ b/tests/functional/exit_codes/fixtures.py @@ -0,0 +1,65 @@ +bad_sql = """ +select bad sql here +""" + +dupe_sql = """ +select 1 as id, current_date as updated_at +union all +select 2 as id, current_date as updated_at +union all +select 3 as id, current_date as updated_at +union all +select 4 as id, current_date as updated_at +""" + +good_sql = """ +select 1 as id, current_date as updated_at +union all +select 2 as id, current_date as updated_at +union all +select 3 as id, current_date as updated_at +union all +select 4 as id, current_date as updated_at +""" + +snapshots_good_sql = """ +{% snapshot good_snapshot %} + {{ config(target_schema=schema, target_database=database, strategy='timestamp', unique_key='id', updated_at='updated_at')}} + select * from {{ schema }}.good +{% endsnapshot %} +""" + +snapshots_bad_sql = """ +{% snapshot good_snapshot %} + {{ config(target_schema=schema, target_database=database, strategy='timestamp', unique_key='id', updated_at='updated_at_not_real')}} + select * from {{ schema }}.good +{% endsnapshot %} +""" + +schema_yml = """ +version: 2 +models: +- name: good + columns: + - name: updated_at + data_tests: + - not_null +- name: bad + columns: + - name: updated_at + data_tests: + - not_null +- name: dupe + columns: + - name: updated_at + data_tests: + - unique +""" + +data_seed_good_csv = """a,b,c +1,2,3 +""" + +data_seed_bad_csv = """a,b,c +1,\2,3,a,a,a +""" diff --git a/tests/functional/exit_codes/test_exit_codes.py b/tests/functional/exit_codes/test_exit_codes.py new file mode 100644 index 000000000..d237a6700 --- /dev/null +++ b/tests/functional/exit_codes/test_exit_codes.py @@ -0,0 +1,129 @@ +from dbt.exceptions import GitCheckoutError +from dbt.tests.util import ( + check_table_does_exist, + check_table_does_not_exist, + run_dbt, +) +import pytest + +import fixtures + + +class BaseConfigProject: + @pytest.fixture(scope="class") + def models(self): + return { + "bad.sql": fixtures.bad_sql, + "dupe.sql": fixtures.dupe_sql, + "good.sql": fixtures.good_sql, + "schema.yml": fixtures.schema_yml, + } + + +class TestExitCodes(BaseConfigProject): + @pytest.fixture(scope="class") + def snapshots(self): + return {"g.sql": fixtures.snapshots_good_sql} + + def test_exit_code_run_succeed(self, project): + results = run_dbt(["run", "--model", "good"]) + assert len(results) == 1 + check_table_does_exist(project.adapter, "good") + + def test_exit_code_run_fail(self, project): + results = run_dbt(["run", "--model", "bad"], expect_pass=False) + assert len(results) == 1 + check_table_does_not_exist(project.adapter, "bad") + + def test_schema_test_pass(self, project): + results = run_dbt(["run", "--model", "good"]) + assert len(results) == 1 + + results = run_dbt(["test", "--model", "good"]) + assert len(results) == 1 + + def test_schema_test_fail(self, project): + results = run_dbt(["run", "--model", "dupe"]) + assert len(results) == 1 + + results = run_dbt(["test", "--model", "dupe"], expect_pass=False) + assert len(results) == 1 + + def test_compile(self, project): + results = run_dbt(["compile"]) + assert len(results) == 7 + + def test_snapshot_pass(self, project): + run_dbt(["run", "--model", "good"]) + results = run_dbt(["snapshot"]) + assert len(results) == 1 + check_table_does_exist(project.adapter, "good_snapshot") + + +class TestExitCodesSnapshotFail(BaseConfigProject): + @pytest.fixture(scope="class") + def snapshots(self): + return {"b.sql": fixtures.snapshots_bad_sql} + + def test_snapshot_fail(self, project): + results = run_dbt(["run", "--model", "good"]) + assert len(results) == 1 + + results = run_dbt(["snapshot"], expect_pass=False) + assert len(results) == 1 + check_table_does_not_exist(project.adapter, "good_snapshot") + + +class TestExitCodesDeps: + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "dbt/1.0.0", + } + ] + } + + def test_deps(self, project): + results = run_dbt(["deps"]) + assert results is None + + +class TestExitCodesDepsFail: + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "bad-branch", + }, + ] + } + + def test_deps_fail(self, project): + with pytest.raises(GitCheckoutError) as exc: + run_dbt(["deps"]) + expected_msg = "Error checking out spec='bad-branch'" + assert expected_msg in str(exc.value) + + +class TestExitCodesSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"good.csv": fixtures.data_seed_good_csv} + + def test_seed(self, project): + results = run_dbt(["seed"]) + assert len(results) == 1 + + +class TestExitCodesSeedFail: + @pytest.fixture(scope="class") + def seeds(self): + return {"bad.csv": fixtures.data_seed_bad_csv} + + def test_seed(self, project): + run_dbt(["seed"], expect_pass=False) diff --git a/tests/functional/exposures/fixtures.py b/tests/functional/exposures/fixtures.py new file mode 100644 index 000000000..809df9e90 --- /dev/null +++ b/tests/functional/exposures/fixtures.py @@ -0,0 +1,161 @@ +models_sql = """ +select 1 as id +""" + +second_model_sql = """ +select 1 as id +""" + + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" + + +source_schema_yml = """version: 2 + +sources: + - name: test_source + tables: + - name: test_table +""" + + +semantic_models_schema_yml = """version: 2 + +semantic_models: + - name: semantic_model + model: ref('model') + dimensions: + - name: created_at + type: time + measures: + - name: distinct_metrics + agg: count_distinct + expr: id + entities: + - name: model + type: primary + expr: id + defaults: + agg_time_dimension: created_at +""" + + +metrics_schema_yml = """version: 2 + +metrics: + - name: metric + label: "label" + type: simple + type_params: + measure: "distinct_metrics" +""" + +simple_exposure_yml = """ +version: 2 + +exposures: + - name: simple_exposure + label: simple exposure label + type: dashboard + depends_on: + - ref('model') + - source('test_source', 'test_table') + - metric('metric') + owner: + email: something@example.com + - name: notebook_exposure + type: notebook + depends_on: + - ref('model') + - ref('second_model') + owner: + email: something@example.com + name: Some name + description: > + A description of the complex exposure + maturity: medium + meta: + tool: 'my_tool' + languages: + - python + tags: ['my_department'] + url: http://example.com/notebook/1 +""" + +disabled_models_exposure_yml = """ +version: 2 + +exposures: + - name: simple_exposure + type: dashboard + config: + enabled: False + depends_on: + - ref('model') + owner: + email: something@example.com + - name: notebook_exposure + type: notebook + depends_on: + - ref('model') + - ref('second_model') + owner: + email: something@example.com + name: Some name + description: > + A description of the complex exposure + maturity: medium + meta: + tool: 'my_tool' + languages: + - python + tags: ['my_department'] + url: http://example.com/notebook/1 +""" + +enabled_yaml_level_exposure_yml = """ +version: 2 + +exposures: + - name: simple_exposure + type: dashboard + config: + enabled: True + depends_on: + - ref('model') + owner: + email: something@example.com + - name: notebook_exposure + type: notebook + depends_on: + - ref('model') + - ref('second_model') + owner: + email: something@example.com + name: Some name + description: > + A description of the complex exposure + maturity: medium + meta: + tool: 'my_tool' + languages: + - python + tags: ['my_department'] + url: http://example.com/notebook/1 +""" + +invalid_config_exposure_yml = """ +version: 2 + +exposures: + - name: simple_exposure + type: dashboard + config: + enabled: True and False + depends_on: + - ref('model') + owner: + email: something@example.com +""" diff --git a/tests/functional/exposures/test_exposure_configs.py b/tests/functional/exposures/test_exposure_configs.py new file mode 100644 index 000000000..d1585e575 --- /dev/null +++ b/tests/functional/exposures/test_exposure_configs.py @@ -0,0 +1,121 @@ +from dbt.contracts.graph.model_config import ExposureConfig +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt_common.dataclass_schema import ValidationError +import pytest + +import fixtures + + +class ExposureConfigTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self): + pytest.expected_config = ExposureConfig( + enabled=True, + ) + + +# Test enabled config for exposure in dbt_project.yml +class TestExposureEnabledConfigProjectLevel(ExposureConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_sql, + "metricflow_time_spine.sql": fixtures.metricflow_time_spine_sql, + "second_model.sql": fixtures.second_model_sql, + "exposure.yml": fixtures.simple_exposure_yml, + "schema.yml": fixtures.source_schema_yml, + "semantic_models.yml": fixtures.semantic_models_schema_yml, + "metrics.yml": fixtures.metrics_schema_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "exposures": { + "simple_exposure": { + "enabled": True, + }, + } + } + + def test_enabled_exposure_config_dbt_project(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "exposure.test.simple_exposure" in manifest.exposures + + new_enabled_config = { + "exposures": { + "test": { + "simple_exposure": { + "enabled": False, + }, + } + } + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "exposure.test.simple_exposure" not in manifest.exposures + assert "exposure.test.notebook_exposure" in manifest.exposures + + +# Test disabled config at exposure level in yml file +class TestConfigYamlLevel(ExposureConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_sql, + "second_model.sql": fixtures.second_model_sql, + "schema.yml": fixtures.disabled_models_exposure_yml, + } + + def test_exposure_config_yaml_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "exposure.test.simple_exposure" not in manifest.exposures + assert "exposure.test.notebook_exposure" in manifest.exposures + + +# Test inheritence - set configs at project and exposure level - expect exposure level to win +class TestExposureConfigsInheritence(ExposureConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_sql, + "second_model.sql": fixtures.second_model_sql, + "schema.yml": fixtures.enabled_yaml_level_exposure_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"exposures": {"enabled": False}} + + def test_exposure_all_configs(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + # This should be overridden + assert "exposure.test.simple_exposure" in manifest.exposures + # This should stay disabled + assert "exposure.test.notebook_exposure" not in manifest.exposures + + config_test_table = manifest.exposures.get("exposure.test.simple_exposure").config + + assert isinstance(config_test_table, ExposureConfig) + assert config_test_table == pytest.expected_config + + +# Test invalid config triggers error +class TestInvalidConfig(ExposureConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": fixtures.models_sql, + "second_model.sql": fixtures.second_model_sql, + "schema.yml": fixtures.invalid_config_exposure_yml, + } + + def test_exposure_config_yaml_level(self, project): + with pytest.raises(ValidationError) as excinfo: + run_dbt(["parse"]) + expected_msg = "'True and False' is not of type 'boolean'" + assert expected_msg in str(excinfo.value) diff --git a/tests/functional/exposures/test_exposures.py b/tests/functional/exposures/test_exposures.py new file mode 100644 index 000000000..9d0dbd43f --- /dev/null +++ b/tests/functional/exposures/test_exposures.py @@ -0,0 +1,40 @@ +from dbt.tests.util import get_manifest, run_dbt +import pytest + +import fixtures + + +class TestBasicExposures: + @pytest.fixture(scope="class") + def models(self): + return { + "exposure.yml": fixtures.simple_exposure_yml, + "model.sql": fixtures.models_sql, + "metricflow_time_spine.sql": fixtures.metricflow_time_spine_sql, + "second_model.sql": fixtures.second_model_sql, + "schema.yml": fixtures.source_schema_yml, + "semantic_models.yml": fixtures.semantic_models_schema_yml, + "metrics.yml": fixtures.metrics_schema_yml, + } + + def test_names_with_spaces(self, project): + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + exposure_ids = list(manifest.exposures.keys()) + expected_exposure_ids = [ + "exposure.test.simple_exposure", + "exposure.test.notebook_exposure", + ] + assert exposure_ids == expected_exposure_ids + assert manifest.exposures["exposure.test.simple_exposure"].label == "simple exposure label" + + def test_depends_on(self, project): + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + exposure_depends_on = manifest.exposures["exposure.test.simple_exposure"].depends_on.nodes + expected_exposure_depends_on = [ + "source.test.test_source.test_table", + "model.test.model", + "metric.test.metric", + ] + assert sorted(exposure_depends_on) == sorted(expected_exposure_depends_on) diff --git a/tests/functional/graph_selection/test_graph_selection.py b/tests/functional/graph_selection/test_graph_selection.py new file mode 100644 index 000000000..28d5ff4e0 --- /dev/null +++ b/tests/functional/graph_selection/test_graph_selection.py @@ -0,0 +1,307 @@ +import json +import os + +from dbt.tests.util import check_result_nodes_by_name, run_dbt +import pytest + +from tests.functional.projects import GraphSelection + + +selectors_yml = """ +selectors: +- name: bi_selector + description: This is a BI selector + definition: + method: tag + value: bi +""" + + +def assert_correct_schemas(project): + adapter = project.adapter + with adapter.connection_named("__test"): + exists = adapter.check_schema_exists(project.database, project.test_schema) + assert exists + + schema = project.test_schema + "_and_then" + exists = adapter.check_schema_exists(project.database, schema) + assert not exists + + +def clear_schema(project): + project.run_sql("drop schema if exists {schema} cascade") + project.run_sql("create schema {schema}") + + +class TestGraphSelection(GraphSelection): + # The tests here aiming to test whether the correct node is selected, + # we don't need the run to pass + @pytest.fixture(scope="class") + def selectors(self): + return selectors_yml + + def test_specific_model(self, project): + results = run_dbt(["run", "--select", "users"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + assert_correct_schemas(project) + + def test_tags(self, project, project_root): + results = run_dbt(["run", "--selector", "bi_selector"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + assert_correct_schemas(project) + manifest_path = project_root.join("target/manifest.json") + assert os.path.exists(manifest_path) + with open(manifest_path) as fp: + manifest = json.load(fp) + assert "selectors" in manifest + + def test_tags_and_children(self, project): + results = run_dbt(["run", "--select", "tag:base+"], expect_pass=False) + check_result_nodes_by_name( + results, + [ + "emails_alt", + "users_rollup", + "users", + "alternative.users", + "users_rollup_dependency", + ], + ) + assert_correct_schemas(project) + + def test_tags_and_children_limited(self, project): + results = run_dbt(["run", "--select", "tag:base+2"], expect_pass=False) + check_result_nodes_by_name( + results, ["emails_alt", "users_rollup", "users", "alternative.users"] + ) + assert_correct_schemas(project) + + def test_group(self, project): + expected = ["test.unique_users_id", "test.users"] + results = run_dbt(["ls", "--select", "group:users_group"]) + assert sorted(results) == expected + + def test_specific_model_and_children(self, project): + results = run_dbt(["run", "--select", "users+"], expect_pass=False) + check_result_nodes_by_name( + results, ["users", "users_rollup", "emails_alt", "users_rollup_dependency"] + ) + assert_correct_schemas(project) + + def test_specific_model_and_children_limited(self, project): + results = run_dbt(["run", "--select", "users+1"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup", "emails_alt"]) + assert_correct_schemas(project) + + def test_specific_model_and_parents(self, project): + results = run_dbt(["run", "--select", "+users_rollup"], expect_pass=False) + check_result_nodes_by_name(results, ["users_rollup", "users"]) + assert_correct_schemas(project) + + def test_specific_model_and_parents_limited(self, project): + results = run_dbt(["run", "--select", "1+users_rollup"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + assert_correct_schemas(project) + + def test_specific_model_with_exclusion(self, project): + results = run_dbt( + [ + "run", + "--select", + "+users_rollup", + "--exclude", + "models/users_rollup.sql", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users"]) + assert_correct_schemas(project) + + def test_locally_qualified_name(self, project): + results = run_dbt(["run", "--select", "test.subdir"]) + check_result_nodes_by_name(results, ["nested_users", "subdir", "versioned"]) + assert_correct_schemas(project) + + os.chdir( + project.profiles_dir + ) # Change to random directory to test that Path selector works with project-dir + results = run_dbt( + ["run", "--project-dir", str(project.project_root), "--select", "models/test/subdir*"] + ) + check_result_nodes_by_name(results, ["nested_users", "subdir", "versioned"]) + assert_correct_schemas(project) + + results = run_dbt( + [ + "build", + "--project-dir", + str(project.project_root), + "--select", + "models/patch_path_selection_schema.yml", + ] + ) + check_result_nodes_by_name(results, ["subdir"]) + assert_correct_schemas(project) + + # Check that list command works + os.chdir( + project.profiles_dir + ) # Change to random directory to test that Path selector works with project-dir + results = run_dbt( + [ + "-q", + "ls", + "-s", + "path:models/test/subdir.sql", + "--project-dir", + str(project.project_root), + ] + # ["list", "--project-dir", str(project.project_root), "--select", "models/test/subdir*"] + ) + assert len(results) == 1 + + def test_locally_qualified_name_model_with_dots(self, project): + results = run_dbt(["run", "--select", "alternative.users"], expect_pass=False) + check_result_nodes_by_name(results, ["alternative.users"]) + assert_correct_schemas(project) + + results = run_dbt(["run", "--select", "models/alternative.*"], expect_pass=False) + check_result_nodes_by_name(results, ["alternative.users"]) + assert_correct_schemas(project) + + def test_childrens_parents(self, project): + results = run_dbt(["run", "--select", "@base_users"], expect_pass=False) + check_result_nodes_by_name( + results, + [ + "alternative.users", + "users_rollup", + "users", + "emails_alt", + "users_rollup_dependency", + ], + ) + + results = run_dbt(["test", "--select", "test_name:not_null"], expect_pass=False) + check_result_nodes_by_name(results, ["not_null_emails_email"]) + + def test_more_childrens_parents(self, project): + results = run_dbt(["run", "--select", "@users"], expect_pass=False) + check_result_nodes_by_name( + results, ["users_rollup", "users", "emails_alt", "users_rollup_dependency"] + ) + + results = run_dbt(["test", "--select", "test_name:unique"], expect_pass=False) + check_result_nodes_by_name(results, ["unique_users_id", "unique_users_rollup_gender"]) + + def test_concat(self, project): + results = run_dbt(["run", "--select", "@emails_alt", "users_rollup"], expect_pass=False) + check_result_nodes_by_name(results, ["users_rollup", "users", "emails_alt"]) + + def test_concat_multiple(self, project): + results = run_dbt( + ["run", "--select", "@emails_alt", "--select", "users_rollup"], expect_pass=False + ) + check_result_nodes_by_name(results, ["users_rollup", "users", "emails_alt"]) + + def test_concat_exclude(self, project): + results = run_dbt( + [ + "run", + "--select", + "@emails_alt", + "users_rollup", + "--exclude", + "emails_alt", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users_rollup", "users"]) + + def test_concat_exclude_multiple(self, project): + results = run_dbt( + [ + "run", + "--select", + "@emails_alt", + "users_rollup", + "--exclude", + "users", + "--exclude", + "emails_alt", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users_rollup"]) + + def test_concat_exclude_concat(self, project): + results = run_dbt( + [ + "run", + "--select", + "@emails_alt", + "users_rollup", + "--exclude", + "emails_alt", + "users_rollup", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users"]) + + results = run_dbt( + [ + "test", + "--select", + "@emails_alt", + "users_rollup", + "--exclude", + "emails_alt", + "users_rollup", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["unique_users_id"]) + + def test_exposure_parents(self, project): + results = run_dbt(["ls", "--select", "+exposure:seed_ml_exposure"]) + assert sorted(results) == [ + "exposure:test.seed_ml_exposure", + "source:test.raw.seed", + ] + results = run_dbt(["ls", "--select", "1+exposure:user_exposure"]) + assert sorted(results) == [ + "exposure:test.user_exposure", + "test.unique_users_id", + "test.unique_users_rollup_gender", + "test.users", + "test.users_rollup", + "test.versioned.v3", + ] + results = run_dbt(["run", "-m", "+exposure:user_exposure"], expect_pass=False) + check_result_nodes_by_name( + results, + [ + "users_rollup", + "users", + ], + ) + + +class TestListPathGraphSelection(SelectionFixtures): + def test_list_select_with_project_dir(self, project): + # Check that list command works + os.chdir( + project.profiles_dir + ) # Change to random directory to test that Path selector works with project-dir + results = run_dbt( + [ + "-q", + "ls", + "-s", + "path:models/test/subdir.sql", + "--project-dir", + str(project.project_root), + ] + ) + assert results == ["test.test.subdir"] diff --git a/tests/functional/graph_selection/test_group_selection.py b/tests/functional/graph_selection/test_group_selection.py new file mode 100644 index 000000000..8d4e69984 --- /dev/null +++ b/tests/functional/graph_selection/test_group_selection.py @@ -0,0 +1,116 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.projects.graph_selection import ( + read_data, + read_model, + read_schema, +) + + +selectors_yml = """ +selectors: + - name: group_specified_as_string_str + definition: group:users_group + - name: group_specified_as_string_dict + definition: + method: group + value: users_group + - name: users_grouped_childrens_parents + definition: + method: group + value: users_group + childrens_parents: true +""" + + +class TestGroupSelection: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": read_model("schema"), + "base_users.sql": read_model("base_users"), + "users.sql": read_model("users"), + "users_rollup.sql": read_model("users_rollup"), + "versioned_v3.sql": read_model("base_users"), + "users_rollup_dependency.sql": read_model("users_rollup_dependency"), + "emails.sql": read_model("emails"), + "emails_alt.sql": read_model("emails"), + "alternative.users.sql": read_model("alternative_users"), + "never_selected.sql": read_model("never_selected"), + "test": { + "subdir.sql": read_model("subdir"), + "subdir": {"nested_users.sql": read_model("nested_users")}, + }, + } + + @pytest.fixture(scope="class") + def seeds(self, test_data_dir): + return { + "properties.yml": read_schema("properties"), + "seed.csv": read_data("seed"), + "summary_expected.csv": read_data("summary_expected"), + } + + @pytest.fixture(scope="class") + def selectors(self): + return selectors_yml + + def test_select_models_by_group(self, project): + results = run_dbt(["ls", "--model", "group:users_group"]) + assert sorted(results) == ["test.users"] + + def test_select_group_selector_str(self, project): + results = run_dbt(["ls", "--selector", "group_specified_as_string_str"]) + assert sorted(results) == ["test.unique_users_id", "test.users"] + + def test_select_group_selector_dict(self, project): + results = run_dbt(["ls", "--selector", "group_specified_as_string_dict"]) + assert sorted(results) == ["test.unique_users_id", "test.users"] + + def test_select_models_by_group_and_children(self, project): # noqa + results = run_dbt(["ls", "--models", "+group:users_group+"]) + assert sorted(results) == [ + "test.base_users", + "test.emails_alt", + "test.users", + "test.users_rollup", + "test.users_rollup_dependency", + ] + + def test_select_group_and_children(self, project): # noqa + expected = [ + "exposure:test.user_exposure", + "source:test.raw.seed", + "test.base_users", + "test.emails_alt", + "test.unique_users_id", + "test.unique_users_rollup_gender", + "test.users", + "test.users_rollup", + "test.users_rollup_dependency", + ] + results = run_dbt(["ls", "--select", "+group:users_group+"]) + assert sorted(results) == expected + + def test_select_group_and_children_selector_str(self, project): # noqa + expected = [ + "exposure:test.user_exposure", + "source:test.raw.seed", + "test.base_users", + "test.emails_alt", + "test.unique_users_id", + "test.unique_users_rollup_gender", + "test.users", + "test.users_rollup", + "test.users_rollup_dependency", + "test.versioned.v3", + ] + results = run_dbt(["ls", "--selector", "users_grouped_childrens_parents"]) + assert sorted(results) == expected + + # 2 groups + def test_select_models_two_groups(self, project): + expected = ["test.base_users", "test.emails", "test.users"] + results = run_dbt(["ls", "--models", "@group:emails_group group:users_group"]) + assert sorted(results) == expected diff --git a/tests/functional/graph_selection/test_intersection_syntax.py b/tests/functional/graph_selection/test_intersection_syntax.py new file mode 100644 index 000000000..87a0d3762 --- /dev/null +++ b/tests/functional/graph_selection/test_intersection_syntax.py @@ -0,0 +1,238 @@ +from dbt.tests.util import check_result_nodes_by_name, run_dbt +import pytest + +from tests.functional.projects import GraphSelection + + +selectors_yml = """ +selectors: +- name: same_intersection + definition: + intersection: + - fqn: users + - fqn: users +- name: tags_intersection + definition: + intersection: + - tag: bi + - tag: users +- name: triple_descending + definition: + intersection: + - fqn: "*" + - tag: bi + - tag: users +- name: triple_ascending + definition: + intersection: + - tag: users + - tag: bi + - fqn: "*" +- name: intersection_with_exclusion + definition: + intersection: + - method: fqn + value: users_rollup_dependency + parents: true + - method: fqn + value: users + children: true + - exclude: + - users_rollup_dependency +- name: intersection_exclude_intersection + definition: + intersection: + - tag:bi + - "@users" + - exclude: + - intersection: + - tag:bi + - method: fqn + value: users_rollup + children: true +- name: intersection_exclude_intersection_lack + definition: + intersection: + - tag:bi + - "@users" + - exclude: + - intersection: + - method: fqn + value: emails + children_parents: true + - method: fqn + value: emails_alt + children_parents: true +""" + + +# The project and run_seed fixtures will be executed for each test method +class TestIntersectionSyncs(GraphSelection): + # The tests here aiming to test whether the correct node is selected, + # we don't need the run to pass + @pytest.fixture(scope="class") + def selectors(self): + return selectors_yml + + def test_same_model_intersection(self, project): + results = run_dbt(["run", "--models", "users,users"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_same_model_intersection_selectors(self, project): + + results = run_dbt(["run", "--selector", "same_intersection"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_tags_intersection(self, project): + + results = run_dbt(["run", "--models", "tag:bi,tag:users"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_tags_intersection_selectors(self, project): + + results = run_dbt(["run", "--selector", "tags_intersection"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_triple_descending(self, project): + + results = run_dbt(["run", "--models", "*,tag:bi,tag:users"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_triple_descending_schema(self, project): + + results = run_dbt(["run", "--models", "*,tag:bi,tag:users"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_triple_descending_schema_selectors(self, project): + + results = run_dbt(["run", "--selector", "triple_descending"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_triple_ascending(self, project): + + results = run_dbt(["run", "--models", "tag:users,tag:bi,*"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_triple_ascending_schema_selectors(self, project): + + results = run_dbt(["run", "--selector", "triple_ascending"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_with_exclusion(self, project): + + results = run_dbt( + [ + "run", + "--models", + "+users_rollup_dependency,users+", + "--exclude", + "users_rollup_dependency", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + + def test_intersection_with_exclusion_selectors(self, project): + + results = run_dbt(["run", "--selector", "intersection_with_exclusion"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + + def test_intersection_exclude_intersection(self, project): + + results = run_dbt( + ["run", "--models", "tag:bi,@users", "--exclude", "tag:bi,users_rollup+"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_exclude_intersection_selectors(self, project): + + results = run_dbt( + ["run", "--selector", "intersection_exclude_intersection"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_exclude_intersection_lack(self, project): + + results = run_dbt( + ["run", "--models", "tag:bi,@users", "--exclude", "@emails,@emails_alt"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + + def test_intersection_exclude_intersection_lack_selector(self, project): + + results = run_dbt( + ["run", "--selector", "intersection_exclude_intersection_lack"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + + def test_intersection_exclude_triple_intersection(self, project): + + results = run_dbt( + ["run", "--models", "tag:bi,@users", "--exclude", "*,tag:bi,users_rollup"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users"]) + + def test_intersection_concat(self, project): + + results = run_dbt(["run", "--models", "tag:bi,@users", "emails_alt"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup", "emails_alt"]) + + def test_intersection_concat_intersection(self, project): + + results = run_dbt( + ["run", "--models", "tag:bi,@users", "@emails_alt,emails_alt"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "users_rollup", "emails_alt"]) + + def test_intersection_concat_exclude(self, project): + + results = run_dbt( + [ + "run", + "--models", + "tag:bi,@users", + "emails_alt", + "--exclude", + "users_rollup", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "emails_alt"]) + + def test_intersection_concat_exclude_concat(self, project): + + results = run_dbt( + [ + "run", + "--models", + "tag:bi,@users", + "emails_alt,@users", + "--exclude", + "users_rollup_dependency", + "users_rollup", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "emails_alt"]) + + def test_intersection_concat_exclude_intersection_concat(self, project): + + results = run_dbt( + [ + "run", + "--models", + "tag:bi,@users", + "emails_alt,@users", + "--exclude", + "@users,users_rollup_dependency", + "@users,users_rollup", + ], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "emails_alt"]) diff --git a/tests/functional/graph_selection/test_schema_test_graph_selection.py b/tests/functional/graph_selection/test_schema_test_graph_selection.py new file mode 100644 index 000000000..9777b375e --- /dev/null +++ b/tests/functional/graph_selection/test_schema_test_graph_selection.py @@ -0,0 +1,129 @@ +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.projects import GraphSelection + + +def run_schema_and_assert(project, include, exclude, expected_tests): + # deps must run before seed + run_dbt(["deps"]) + run_dbt(["seed"]) + results = run_dbt(["run", "--exclude", "never_selected"]) + assert len(results) == 12 + + test_args = ["test"] + if include: + test_args += ["--select", include] + if exclude: + test_args += ["--exclude", exclude] + test_results = run_dbt(test_args) + + ran_tests = sorted([test.node.name for test in test_results]) + expected_sorted = sorted(expected_tests) + + assert ran_tests == expected_sorted + + +class TestSchemaTestGraphSelection(GraphSelection): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, dbt_integration_project): # noqa: F811 + write_project_files(project_root, "dbt_integration_project", dbt_integration_project) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "dbt_integration_project"}]} + + def test_schema_tests_no_specifiers(self, project): + run_schema_and_assert( + project, + None, + None, + [ + "not_null_emails_email", + "unique_table_model_id", + "unique_users_id", + "unique_users_rollup_gender", + ], + ) + + def test_schema_tests_specify_model(self, project): + run_schema_and_assert(project, "users", None, ["unique_users_id"]) + + def test_schema_tests_specify_tag(self, project): + run_schema_and_assert( + project, "tag:bi", None, ["unique_users_id", "unique_users_rollup_gender"] + ) + + def test_schema_tests_specify_model_and_children(self, project): + run_schema_and_assert( + project, "users+", None, ["unique_users_id", "unique_users_rollup_gender"] + ) + + def test_schema_tests_specify_tag_and_children(self, project): + run_schema_and_assert( + project, + "tag:base+", + None, + ["not_null_emails_email", "unique_users_id", "unique_users_rollup_gender"], + ) + + def test_schema_tests_specify_model_and_parents(self, project): + run_schema_and_assert( + project, + "+users_rollup", + None, + ["unique_users_id", "unique_users_rollup_gender"], + ) + + def test_schema_tests_specify_model_and_parents_with_exclude(self, project): + run_schema_and_assert(project, "+users_rollup", "users_rollup", ["unique_users_id"]) + + def test_schema_tests_specify_exclude_only(self, project): + run_schema_and_assert( + project, + None, + "users_rollup", + ["not_null_emails_email", "unique_table_model_id", "unique_users_id"], + ) + + def test_schema_tests_specify_model_in_pkg(self, project): + run_schema_and_assert( + project, + "test.users_rollup", + None, + # TODO: change this. there's no way to select only direct ancestors + # atm. + ["unique_users_rollup_gender"], + ) + + def test_schema_tests_with_glob(self, project): + run_schema_and_assert( + project, + "*", + "users", + [ + "not_null_emails_email", + "unique_table_model_id", + "unique_users_rollup_gender", + ], + ) + + def test_schema_tests_dep_package_only(self, project): + run_schema_and_assert(project, "dbt_integration_project", None, ["unique_table_model_id"]) + + def test_schema_tests_model_in_dep_pkg(self, project): + run_schema_and_assert( + project, + "dbt_integration_project.table_model", + None, + ["unique_table_model_id"], + ) + + def test_schema_tests_exclude_pkg(self, project): + run_schema_and_assert( + project, + None, + "dbt_integration_project", + ["not_null_emails_email", "unique_users_id", "unique_users_rollup_gender"], + ) diff --git a/tests/functional/graph_selection/test_tag_selection.py b/tests/functional/graph_selection/test_tag_selection.py new file mode 100644 index 000000000..81c8fb174 --- /dev/null +++ b/tests/functional/graph_selection/test_tag_selection.py @@ -0,0 +1,169 @@ +from dbt.tests.util import check_result_nodes_by_name, run_dbt +import pytest + +from tests.functional.projects import GraphSelection + + +selectors_yml = """ +selectors: + - name: tag_specified_as_string_str + definition: tag:specified_as_string + - name: tag_specified_as_string_dict + definition: + method: tag + value: specified_as_string + - name: tag_specified_in_project_children_str + definition: +tag:specified_in_project+ + - name: tag_specified_in_project_children_dict + definition: + method: tag + value: specified_in_project + parents: true + children: true + - name: tagged-bi + definition: + method: tag + value: bi + - name: user_tagged_childrens_parents + definition: + method: tag + value: users + childrens_parents: true + - name: base_ephemerals + definition: + union: + - tag: base + - method: config.materialized + value: ephemeral + - name: warn-severity + definition: + config.severity: warn + - name: roundabout-everything + definition: + union: + - "@tag:users" + - intersection: + - tag: base + - config.materialized: ephemeral +""" + + +class TestTagSelection(GraphSelection): + # The tests here aiming to test whether the correct node is selected, + # we don't need the run to pass + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "models": { + "test": { + "users": {"tags": "specified_as_string"}, + "users_rollup": { + "tags": ["specified_in_project"], + }, + } + }, + } + + @pytest.fixture(scope="class") + def selectors(self): + return selectors_yml + + def test_select_tag(self, project): + results = run_dbt(["run", "--models", "tag:specified_as_string"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_select_tag_selector_str(self, project): + results = run_dbt(["run", "--selector", "tag_specified_as_string_str"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_select_tag_selector_dict(self, project): + results = run_dbt(["run", "--selector", "tag_specified_as_string_dict"], expect_pass=False) + check_result_nodes_by_name(results, ["users"]) + + def test_select_tag_and_children(self, project): # noqa + results = run_dbt(["run", "--models", "+tag:specified_in_project+"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup", "users_rollup_dependency"]) + + def test_select_tag_and_children_selector_str(self, project): # noqa + results = run_dbt( + ["run", "--selector", "tag_specified_in_project_children_str"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "users_rollup", "users_rollup_dependency"]) + + def test_select_tag_and_children_selector_dict(self, project): # noqa + results = run_dbt( + ["run", "--selector", "tag_specified_in_project_children_dict"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["users", "users_rollup", "users_rollup_dependency"]) + + def test_select_tag_in_model_with_project_config(self, project): # noqa + results = run_dbt(["run", "--models", "tag:bi"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + + def test_select_tag_in_model_with_project_config_selector(self, project): # noqa + results = run_dbt(["run", "--selector", "tagged-bi"], expect_pass=False) + check_result_nodes_by_name(results, ["users", "users_rollup"]) + + # check that model configs aren't squashed by project configs + def test_select_tag_in_model_with_project_config_parents_children(self, project): # noqa + results = run_dbt(["run", "--models", "@tag:users"], expect_pass=False) + check_result_nodes_by_name( + results, ["users", "users_rollup", "emails_alt", "users_rollup_dependency"] + ) + + # just the users/users_rollup tests + results = run_dbt(["test", "--models", "@tag:users"], expect_pass=False) + check_result_nodes_by_name(results, ["unique_users_rollup_gender", "unique_users_id"]) + + # just the email test + results = run_dbt( + ["test", "--models", "tag:base,config.materialized:ephemeral"], + expect_pass=False, + ) + check_result_nodes_by_name(results, ["not_null_emails_email"]) + + # also just the email test + results = run_dbt(["test", "--models", "config.severity:warn"], expect_pass=False) + check_result_nodes_by_name(results, ["not_null_emails_email"]) + + # all 3 tests + results = run_dbt( + ["test", "--models", "@tag:users tag:base,config.materialized:ephemeral"], + expect_pass=False, + ) + check_result_nodes_by_name( + results, + ["not_null_emails_email", "unique_users_id", "unique_users_rollup_gender"], + ) + + def test_select_tag_in_model_with_project_config_parents_children_selectors(self, project): + results = run_dbt( + ["run", "--selector", "user_tagged_childrens_parents"], expect_pass=False + ) + check_result_nodes_by_name( + results, ["users", "users_rollup", "emails_alt", "users_rollup_dependency"] + ) + + # just the users/users_rollup tests + results = run_dbt( + ["test", "--selector", "user_tagged_childrens_parents"], expect_pass=False + ) + check_result_nodes_by_name(results, ["unique_users_id", "unique_users_rollup_gender"]) + + # just the email test + results = run_dbt(["test", "--selector", "base_ephemerals"], expect_pass=False) + check_result_nodes_by_name(results, ["not_null_emails_email"]) + + # also just the email test + results = run_dbt(["test", "--selector", "warn-severity"], expect_pass=False) + check_result_nodes_by_name(results, ["not_null_emails_email"]) + + # all 3 tests + results = run_dbt(["test", "--selector", "roundabout-everything"], expect_pass=False) + check_result_nodes_by_name( + results, + ["unique_users_rollup_gender", "unique_users_id", "not_null_emails_email"], + ) diff --git a/tests/functional/graph_selection/test_version_selection.py b/tests/functional/graph_selection/test_version_selection.py new file mode 100644 index 000000000..15f962788 --- /dev/null +++ b/tests/functional/graph_selection/test_version_selection.py @@ -0,0 +1,131 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.projects.graph_selection import ( + read_data, + read_model, + read_schema, +) + + +selectors_yml = """ +selectors: + - name: version_specified_as_string_str + definition: version:latest + - name: version_specified_as_string_dict + definition: + method: version + value: latest + - name: version_childrens_parents + definition: + method: version + value: latest + childrens_parents: true +""" + + +class TestVersionSelection: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": read_schema("schema"), + "versioned_v1.sql": read_model("users"), + "versioned_v2.sql": read_model("users"), + "versioned_v3.sql": read_model("users"), + "versioned_v4.5.sql": read_model("users"), + "versioned_v5.0.sql": read_model("users"), + "versioned_v21.sql": read_model("users"), + "versioned_vtest.sql": read_model("users"), + "base_users.sql": read_model("base_users"), + "users.sql": read_model("users"), + "users_rollup.sql": read_model("users_rollup"), + } + + @pytest.fixture(scope="class") + def seeds(self, test_data_dir): + return { + "properties.yml": read_schema("properties"), + "seed.csv": read_data("seed"), + "summary_expected.csv": read_data("summary_expected"), + } + + @pytest.fixture(scope="class") + def selectors(self): + return selectors_yml + + def test_select_none_versions(self, project): + results = run_dbt(["ls", "--select", "version:none"]) + assert sorted(results) == [ + "test.base_users", + "test.unique_users_id", + "test.unique_users_rollup_gender", + "test.users", + "test.users_rollup", + ] + + def test_select_latest_versions(self, project): + results = run_dbt(["ls", "--select", "version:latest"]) + assert sorted(results) == ["test.versioned.v2"] + + def test_select_old_versions(self, project): + results = run_dbt(["ls", "--select", "version:old"]) + assert sorted(results) == ["test.versioned.v1"] + + def test_select_prerelease_versions(self, project): + results = run_dbt(["ls", "--select", "version:prerelease"]) + assert sorted(results) == [ + "test.versioned.v21", + "test.versioned.v3", + "test.versioned.v4.5", + "test.versioned.v5.0", + "test.versioned.vtest", + ] + + def test_select_version_selector_str(self, project): + results = run_dbt(["ls", "--selector", "version_specified_as_string_str"]) + assert sorted(results) == ["test.versioned.v2"] + + def test_select_version_selector_dict(self, project): + results = run_dbt(["ls", "--selector", "version_specified_as_string_dict"]) + assert sorted(results) == ["test.versioned.v2"] + + def test_select_models_by_version_and_children(self, project): # noqa + results = run_dbt(["ls", "--models", "+version:latest+"]) + assert sorted(results) == ["test.base_users", "test.versioned.v2"] + + def test_select_version_and_children(self, project): # noqa + expected = ["source:test.raw.seed", "test.base_users", "test.versioned.v2"] + results = run_dbt(["ls", "--select", "+version:latest+"]) + assert sorted(results) == expected + + def test_select_group_and_children_selector_str(self, project): # noqa + expected = ["source:test.raw.seed", "test.base_users", "test.versioned.v2"] + results = run_dbt(["ls", "--selector", "version_childrens_parents"]) + assert sorted(results) == expected + + # 2 versions + def test_select_models_two_versions(self, project): + results = run_dbt(["ls", "--models", "version:latest version:old"]) + assert sorted(results) == ["test.versioned.v1", "test.versioned.v2"] + + +my_model_yml = """ +models: + - name: my_model + versions: + - v: 0 +""" + + +class TestVersionZero: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": "select 1 as id", + "another.sql": "select * from {{ ref('my_model') }}", + "schema.yml": my_model_yml, + } + + def test_version_zero(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 diff --git a/tests/functional/incremental_schema_tests/fixtures.py b/tests/functional/incremental_schema_tests/fixtures.py new file mode 100644 index 000000000..2391feb72 --- /dev/null +++ b/tests/functional/incremental_schema_tests/fixtures.py @@ -0,0 +1,394 @@ +# +# Properties +# +_PROPERTIES__SCHEMA = """ +version: 2 + +models: + - name: model_a + columns: + - name: id + tags: [column_level_tag] + data_tests: + - unique + + - name: incremental_ignore + columns: + - name: id + tags: [column_level_tag] + data_tests: + - unique + + - name: incremental_ignore_target + columns: + - name: id + tags: [column_level_tag] + data_tests: + - unique + + - name: incremental_append_new_columns + columns: + - name: id + tags: [column_level_tag] + data_tests: + - unique + + - name: incremental_append_new_columns_target + columns: + - name: id + tags: [column_level_tag] + data_tests: + - unique + + - name: incremental_sync_all_columns + columns: + - name: id + tags: [column_level_tag] + data_tests: + - unique + + - name: incremental_sync_all_columns_target + columns: + - name: id + tags: [column_leveL_tag] + data_tests: + - unique +""" + +# +# Models +# +_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='sync_all_columns' + + ) +}} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% set string_type = 'varchar(10)' %} + +{% if is_incremental() %} + +SELECT id, + cast(field1 as {{string_type}}) as field1 + +FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) + +{% else %} + +select id, + cast(field1 as {{string_type}}) as field1, + cast(field2 as {{string_type}}) as field2 + +from source_data where id <= 3 + +{% endif %} +""" + +_MODELS__INCREMENTAL_IGNORE = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='ignore' + ) +}} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% if is_incremental() %} + +SELECT id, field1, field2, field3, field4 FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) + +{% else %} + +SELECT id, field1, field2 FROM source_data LIMIT 3 + +{% endif %} +""" + +_MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET = """ +{{ + config(materialized='table') +}} + +with source_data as ( + + select * from {{ ref('model_a') }} + +) + +{% set string_type = 'varchar(10)' %} + +select id + ,cast(field1 as {{string_type}}) as field1 + +from source_data +order by id +""" + +_MODELS__INCREMENTAL_IGNORE_TARGET = """ +{{ + config(materialized='table') +}} + +with source_data as ( + + select * from {{ ref('model_a') }} + +) + +select id + ,field1 + ,field2 + +from source_data +""" + +_MODELS__INCREMENTAL_FAIL = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='fail' + ) +}} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% if is_incremental() %} + +SELECT id, field1, field2 FROM source_data + +{% else %} + +SELECT id, field1, field3 FROm source_data + +{% endif %} +""" + +_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='sync_all_columns' + + ) +}} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% set string_type = 'varchar(10)' %} + +{% if is_incremental() %} + +SELECT id, + cast(field1 as {{string_type}}) as field1, + cast(field3 as {{string_type}}) as field3, -- to validate new fields + cast(field4 as {{string_type}}) AS field4 -- to validate new fields + +FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) + +{% else %} + +select id, + cast(field1 as {{string_type}}) as field1, + cast(field2 as {{string_type}}) as field2 + +from source_data where id <= 3 + +{% endif %} +""" + +_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='append_new_columns' + ) +}} + +{% set string_type = 'varchar(10)' %} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% if is_incremental() %} + +SELECT id, + cast(field1 as {{string_type}}) as field1, + cast(field3 as {{string_type}}) as field3, + cast(field4 as {{string_type}}) as field4 +FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) + +{% else %} + +SELECT id, + cast(field1 as {{string_type}}) as field1, + cast(field2 as {{string_type}}) as field2 +FROM source_data where id <= 3 + +{% endif %} +""" + +_MODELS__A = """ +{{ + config(materialized='table') +}} + +with source_data as ( + + select 1 as id, 'aaa' as field1, 'bbb' as field2, 111 as field3, 'TTT' as field4 + union all select 2 as id, 'ccc' as field1, 'ddd' as field2, 222 as field3, 'UUU' as field4 + union all select 3 as id, 'eee' as field1, 'fff' as field2, 333 as field3, 'VVV' as field4 + union all select 4 as id, 'ggg' as field1, 'hhh' as field2, 444 as field3, 'WWW' as field4 + union all select 5 as id, 'iii' as field1, 'jjj' as field2, 555 as field3, 'XXX' as field4 + union all select 6 as id, 'kkk' as field1, 'lll' as field2, 666 as field3, 'YYY' as field4 + +) + +select id + ,field1 + ,field2 + ,field3 + ,field4 + +from source_data +""" + +_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET = """ +{{ + config(materialized='table') +}} + +{% set string_type = 'varchar(10)' %} + +with source_data as ( + + select * from {{ ref('model_a') }} + +) + +select id + ,cast(field1 as {{string_type}}) as field1 + ,cast(field2 as {{string_type}}) as field2 + ,cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3 + ,cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 + +from source_data +""" + +_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS = """ +{{ + config( + materialized='incremental', + unique_key='id', + on_schema_change='append_new_columns' + ) +}} + +{% set string_type = 'varchar(10)' %} + +WITH source_data AS (SELECT * FROM {{ ref('model_a') }} ) + +{% if is_incremental() %} + +SELECT id, + cast(field1 as {{string_type}}) as field1, + cast(field2 as {{string_type}}) as field2, + cast(field3 as {{string_type}}) as field3, + cast(field4 as {{string_type}}) as field4 +FROM source_data WHERE id NOT IN (SELECT id from {{ this }} ) + +{% else %} + +SELECT id, + cast(field1 as {{string_type}}) as field1, + cast(field2 as {{string_type}}) as field2 +FROM source_data where id <= 3 + +{% endif %} +""" + +_MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET = """ +{{ + config(materialized='table') +}} + +with source_data as ( + + select * from {{ ref('model_a') }} + +) + +{% set string_type = 'varchar(10)' %} + +select id + ,cast(field1 as {{string_type}}) as field1 + --,field2 + ,cast(case when id <= 3 then null else field3 end as {{string_type}}) as field3 + ,cast(case when id <= 3 then null else field4 end as {{string_type}}) as field4 + +from source_data +order by id +""" + +_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET = """ +{{ + config(materialized='table') +}} + +{% set string_type = 'varchar(10)' %} + +with source_data as ( + + select * from {{ ref('model_a') }} + +) + +select id, + cast(field1 as {{string_type}}) as field1, + cast(CASE WHEN id > 3 THEN NULL ELSE field2 END as {{string_type}}) AS field2, + cast(CASE WHEN id <= 3 THEN NULL ELSE field3 END as {{string_type}}) AS field3, + cast(CASE WHEN id <= 3 THEN NULL ELSE field4 END as {{string_type}}) AS field4 + +from source_data +""" + +# +# Tests +# + +_TESTS__SELECT_FROM_INCREMENTAL_IGNORE = """ +select * from {{ ref('incremental_ignore') }} where false +""" + +_TESTS__SELECT_FROM_A = """ +select * from {{ ref('model_a') }} where false +""" + +_TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET = """ +select * from {{ ref('incremental_append_new_columns_target') }} where false +""" + +_TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS = """ +select * from {{ ref('incremental_sync_all_columns') }} where false +""" + +_TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET = """ +select * from {{ ref('incremental_sync_all_columns_target') }} where false +""" + +_TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET = """ +select * from {{ ref('incremental_ignore_target') }} where false +""" + +_TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS = """ +select * from {{ ref('incremental_append_new_columns') }} where false +""" diff --git a/tests/functional/incremental_schema_tests/test_incremental_schema.py b/tests/functional/incremental_schema_tests/test_incremental_schema.py new file mode 100644 index 000000000..d4cf9d0fd --- /dev/null +++ b/tests/functional/incremental_schema_tests/test_incremental_schema.py @@ -0,0 +1,120 @@ +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + +from tests.functional.incremental_schema_tests.fixtures import ( + _MODELS__A, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _MODELS__INCREMENTAL_FAIL, + _MODELS__INCREMENTAL_IGNORE, + _MODELS__INCREMENTAL_IGNORE_TARGET, + _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, + _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, + _PROPERTIES__SCHEMA, + _TESTS__SELECT_FROM_A, + _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS, + _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _TESTS__SELECT_FROM_INCREMENTAL_IGNORE, + _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET, + _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS, + _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, +) + + +class TestIncrementalSchemaChange: + @pytest.fixture(scope="class") + def properties(self): + return { + "schema.yml": _PROPERTIES__SCHEMA, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "incremental_sync_remove_only.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + "incremental_ignore.sql": _MODELS__INCREMENTAL_IGNORE, + "incremental_sync_remove_only_target.sql": _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, + "incremental_ignore_target.sql": _MODELS__INCREMENTAL_IGNORE_TARGET, + "incremental_fail.sql": _MODELS__INCREMENTAL_FAIL, + "incremental_sync_all_columns.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, + "incremental_append_new_columns_remove_one.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, + "model_a.sql": _MODELS__A, + "incremental_append_new_columns_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + "incremental_append_new_columns.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, + "incremental_sync_all_columns_target.sql": _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, + "incremental_append_new_columns_remove_one_target.sql": _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "select_from_incremental.sql": _TESTS__SELECT_FROM_INCREMENTAL_IGNORE, + "select_from_a.sql": _TESTS__SELECT_FROM_A, + "select_from_incremental_append_new_columns_target.sql": _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + "select_from_incremental_sync_all_columns.sql": _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS, + "select_from_incremental_sync_all_columns_target.sql": _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, + "select_from_incremental_ignore_target.sql": _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET, + "select_from_incremental_append_new_columns.sql": _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS, + } + + def run_twice_and_assert(self, include, compare_source, compare_target, project): + + # dbt run (twice) + run_args = ["run"] + if include: + run_args.extend(("--select", include)) + results_one = run_dbt(run_args) + assert len(results_one) == 3 + + results_two = run_dbt(run_args) + assert len(results_two) == 3 + + check_relations_equal(project.adapter, [compare_source, compare_target]) + + def run_incremental_append_new_columns(self, project): + select = "model_a incremental_append_new_columns incremental_append_new_columns_target" + compare_source = "incremental_append_new_columns" + compare_target = "incremental_append_new_columns_target" + self.run_twice_and_assert(select, compare_source, compare_target, project) + + def run_incremental_append_new_columns_remove_one(self, project): + select = "model_a incremental_append_new_columns_remove_one incremental_append_new_columns_remove_one_target" + compare_source = "incremental_append_new_columns_remove_one" + compare_target = "incremental_append_new_columns_remove_one_target" + self.run_twice_and_assert(select, compare_source, compare_target, project) + + def run_incremental_sync_all_columns(self, project): + select = "model_a incremental_sync_all_columns incremental_sync_all_columns_target" + compare_source = "incremental_sync_all_columns" + compare_target = "incremental_sync_all_columns_target" + self.run_twice_and_assert(select, compare_source, compare_target, project) + + def run_incremental_sync_remove_only(self, project): + select = "model_a incremental_sync_remove_only incremental_sync_remove_only_target" + compare_source = "incremental_sync_remove_only" + compare_target = "incremental_sync_remove_only_target" + self.run_twice_and_assert(select, compare_source, compare_target, project) + + def test_run_incremental_ignore(self, project): + select = "model_a incremental_ignore incremental_ignore_target" + compare_source = "incremental_ignore" + compare_target = "incremental_ignore_target" + self.run_twice_and_assert(select, compare_source, compare_target, project) + + def test_run_incremental_append_new_columns(self, project): + self.run_incremental_append_new_columns(project) + self.run_incremental_append_new_columns_remove_one(project) + + def test_run_incremental_sync_all_columns(self, project): + self.run_incremental_sync_all_columns(project) + self.run_incremental_sync_remove_only(project) + + def test_run_incremental_fail_on_schema_change(self, project): + select = "model_a incremental_fail" + run_dbt(["run", "--models", select, "--full-refresh"]) + results_two = run_dbt(["run", "--models", select], expect_pass=False) + assert "Compilation Error" in results_two[1].message diff --git a/tests/functional/invalid_model_tests/test_invalid_models.py b/tests/functional/invalid_model_tests/test_invalid_models.py new file mode 100644 index 000000000..ad1bd7a7a --- /dev/null +++ b/tests/functional/invalid_model_tests/test_invalid_models.py @@ -0,0 +1,225 @@ +from dbt.exceptions import ParsingError +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + + +# from `test/integration/011_invalid_model_tests`, invalid_model_tests + +# +# Seeds +# + +seeds__base_seed = """ +first_name,last_name,email,gender,ip_address +Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 +Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 +Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 +Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 +Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 +Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 +Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 +Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 +Gary,Day,gday8@nih.gov,Male,35.81.68.186 +Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 +""" + +# +# Properties +# + +properties__seed_types_yml = """ +version: 2 +seeds: + - name: seeds__base_seed + config: + +column_types: + first_name: varchar(50), + last_name: varchar(50), + email: varchar(50), + gender: varchar(50), + ip_address: varchar(20) + +""" + +# see config in test class +properties__disabled_source_yml = """ +version: 2 +sources: + - name: test_source + schema: "{{ target.schema }}" + tables: + - name: test_table + identifier: seed +""" + +# +# Macros +# + +macros__bad_macros = """ +{% macro some_macro(arg) %} + {{ arg }} +{% endmacro %} +""" + +# +# Models +# + +models__view_bad_enabled_value = """ +{{ + config( + enabled = 'false' + ) +}} + +select * from {{ this.schema }}.seed +""" + +models__view_disabled = """ +{{ + config( + enabled = False + ) +}} + +select * from {{ this.schema }}.seed +""" + +models__dependent_on_view = """ +select * from {{ ref('models__view_disabled') }} +""" + +models__with_bad_macro = """ +{{ some_macro(invalid='test') }} +select 1 as id +""" + +models__referencing_disabled_source = """ +select * from {{ source('test_source', 'test_table') }} +""" + +# +# Tests +# + + +class InvalidModelBase(object): + @pytest.fixture(scope="class") + def seeds(self): + return { + "seeds__base_seed.csv": seeds__base_seed, + } + + @pytest.fixture(scope="class") + def properties(self): + return { + "properties__seed_types.yml": properties__seed_types_yml, + } + + +class TestMalformedEnabledParam(InvalidModelBase): + @pytest.fixture(scope="class") + def models(self): + return { + "models__view_bad_enabled_value.sql": models__view_bad_enabled_value, + } + + def test_view_disabled(self, project): + with pytest.raises(ParsingError) as exc: + run_dbt(["seed"]) + + assert "enabled" in str(exc.value) + + +class TestReferencingDisabledModel(InvalidModelBase): + """Expects that the upstream model is disabled""" + + @pytest.fixture(scope="class") + def models(self): + return { + "models__view_disabled.sql": models__view_disabled, + "models__dependent_on_view.sql": models__dependent_on_view, + } + + def test_referencing_disabled_model(self, project): + with pytest.raises(CompilationError) as exc: + run_dbt() + + assert "which is disabled" in str(exc.value) + + +class TestMissingModelReference(InvalidModelBase): + """Expects that the upstream model is not found""" + + @pytest.fixture(scope="class") + def models(self): + return {"models__dependent_on_view.sql": models__dependent_on_view} + + def test_models_not_found(self, project): + with pytest.raises(CompilationError) as exc: + run_dbt() + + assert "which was not found" in str(exc.value) + + +class TestInvalidMacroCall(InvalidModelBase): + @pytest.fixture(scope="class") + def macros(self): + return {"macros__bad_macros.sql": macros__bad_macros} + + @pytest.fixture(scope="class") + def models(self): + return {"models__with_bad_macro.sql": models__with_bad_macro} + + def test_with_invalid_macro_call(self, project): + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + + assert "macro 'dbt_macro__some_macro' takes no keyword argument 'invalid'" in str( + exc.value + ) + + +class TestInvalidDisabledSource(InvalidModelBase): + @pytest.fixture(scope="class") + def properties(self): + return { + "properties__seed_types.yml": properties__seed_types_yml, + "properties__disabled_source.yml": properties__disabled_source_yml, + } + + @pytest.fixture(scope="class") + def models(self): + return {"models__referencing_disabled_source.sql": models__referencing_disabled_source} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "sources": { + "test": { + "enabled": False, + } + } + } + + def test_postgres_source_disabled(self, project): + with pytest.raises(CompilationError) as exc: + run_dbt() + + assert "which is disabled" in str(exc.value) + + +class TestInvalidMissingSource(InvalidModelBase): + """like TestInvalidDisabledSource but source omitted entirely""" + + @pytest.fixture(scope="class") + def models(self): + return {"models__referencing_disabled_source.sql": models__referencing_disabled_source} + + def test_source_missing(self, project): + with pytest.raises(CompilationError) as exc: + run_dbt() + + assert "which was not found" in str(exc.value) diff --git a/tests/functional/invalid_model_tests/test_model_warning.py b/tests/functional/invalid_model_tests/test_model_warning.py new file mode 100644 index 000000000..c225f2ce2 --- /dev/null +++ b/tests/functional/invalid_model_tests/test_model_warning.py @@ -0,0 +1,18 @@ +from dbt.tests.util import run_dbt +import pytest + + +warnings_sql = """ +{% do exceptions.warn('warning: everything is terrible but not that terrible') %} +{{ exceptions.warn("warning: everything is terrible but not that terrible") }} +select 1 as id +""" + + +class TestEmitWarning: + @pytest.fixture(scope="class") + def models(self): + return {"warnings.sql": warnings_sql} + + def test_warn(self, project): + run_dbt(["run"], expect_pass=True) diff --git a/tests/functional/list/fixtures.py b/tests/functional/list/fixtures.py new file mode 100644 index 000000000..ae5514c62 --- /dev/null +++ b/tests/functional/list/fixtures.py @@ -0,0 +1,213 @@ +import pytest +from dbt.tests.fixtures.project import write_project_files + + +snapshots__snapshot_sql = """ +{% snapshot my_snapshot %} + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{database}}.{{schema}}.seed +{% endsnapshot %} + +""" + +tests__t_sql = """ +select 1 as id limit 0 + +""" + +models__schema_yml = """ +version: 2 +models: + - name: outer + description: The outer table + columns: + - name: id + description: The id value + data_tests: + - unique + - not_null + +sources: + - name: my_source + tables: + - name: my_table + +""" + +models__ephemeral_sql = """ + +{{ config(materialized='ephemeral') }} + +select + 1 as id, + {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as created_at + +""" + +models__metric_flow = """ + +select + {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as date_day + +""" + +models__incremental_sql = """ +{{ + config( + materialized = "incremental", + incremental_strategy = "delete+insert", + ) +}} + +select * from {{ ref('seed') }} + +{% if is_incremental() %} + where a > (select max(a) from {{this}}) +{% endif %} + +""" + +models__docs_md = """ +{% docs my_docs %} + some docs +{% enddocs %} + +""" + +models__outer_sql = """ +select * from {{ ref('ephemeral') }} + +""" + +models__sub__inner_sql = """ +select * from {{ ref('outer') }} + +""" + +macros__macro_stuff_sql = """ +{% macro cool_macro() %} + wow! +{% endmacro %} + +{% macro other_cool_macro(a, b) %} + cool! +{% endmacro %} + +""" + +seeds__seed_csv = """a,b +1,2 +""" + +analyses__a_sql = """ +select 4 as id + +""" + +semantic_models__sm_yml = """ +semantic_models: + - name: my_sm + model: ref('outer') + defaults: + agg_time_dimension: created_at + entities: + - name: my_entity + type: primary + expr: id + dimensions: + - name: created_at + type: time + type_params: + time_granularity: day + measures: + - name: total_outer_count + agg: count + expr: 1 + +""" + +metrics__m_yml = """ +metrics: + - name: total_outer + type: simple + description: The total count of outer + label: Total Outer + type_params: + measure: total_outer_count +""" + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots__snapshot_sql} + + +@pytest.fixture(scope="class") +def tests(): + return {"t.sql": tests__t_sql} + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "ephemeral.sql": models__ephemeral_sql, + "incremental.sql": models__incremental_sql, + "docs.md": models__docs_md, + "outer.sql": models__outer_sql, + "metricflow_time_spine.sql": models__metric_flow, + "sm.yml": semantic_models__sm_yml, + "m.yml": metrics__m_yml, + "sub": {"inner.sql": models__sub__inner_sql}, + } + + +@pytest.fixture(scope="class") +def macros(): + return {"macro_stuff.sql": macros__macro_stuff_sql} + + +@pytest.fixture(scope="class") +def seeds(): + return {"seed.csv": seeds__seed_csv} + + +@pytest.fixture(scope="class") +def analyses(): + return {"a.sql": analyses__a_sql} + + +@pytest.fixture(scope="class") +def semantic_models(): + return {"sm.yml": semantic_models__sm_yml} + + +@pytest.fixture(scope="class") +def metrics(): + return {"m.yml": metrics__m_yml} + + +@pytest.fixture(scope="class") +def project_files( + project_root, + snapshots, + tests, + models, + macros, + seeds, + analyses, +): + write_project_files(project_root, "snapshots", snapshots) + write_project_files(project_root, "tests", tests) + write_project_files(project_root, "models", models) + write_project_files(project_root, "macros", macros) + write_project_files(project_root, "seeds", seeds) + write_project_files(project_root, "analyses", analyses) diff --git a/tests/functional/list/test_list.py b/tests/functional/list/test_list.py new file mode 100644 index 000000000..3de97602c --- /dev/null +++ b/tests/functional/list/test_list.py @@ -0,0 +1,797 @@ +import json +import os + +from dbt.logger import log_manager +from dbt.tests.util import run_dbt +import pytest + + +class TestList: + def dir(self, value): + return os.path.normpath(value) + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "analysis-paths": [self.dir("analyses")], + "snapshot-paths": [self.dir("snapshots")], + "macro-paths": [self.dir("macros")], + "seed-paths": [self.dir("seeds")], + "test-paths": [self.dir("tests")], + "seeds": { + "quote_columns": False, + }, + } + + def run_dbt_ls(self, args=None, expect_pass=True): + log_manager.stdout_console() + full_args = ["ls"] + if args is not None: + full_args += args + + result = run_dbt(args=full_args, expect_pass=expect_pass) + + log_manager.stdout_console() + return result + + def assert_json_equal(self, json_str, expected): + assert json.loads(json_str) == expected + + def expect_given_output(self, args, expectations): + for key, values in expectations.items(): + ls_result = self.run_dbt_ls(args + ["--output", key]) + if not isinstance(values, (list, tuple)): + values = [values] + assert len(ls_result) == len(values) + for got, expected in zip(ls_result, values): + if key == "json": + self.assert_json_equal(got, expected) + else: + assert got == expected + + def expect_snapshot_output(self, project): + expectations = { + "name": "my_snapshot", + "selector": "test.snapshot.my_snapshot", + "json": { + "name": "my_snapshot", + "package_name": "test", + "depends_on": {"nodes": [], "macros": []}, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "snapshot", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "target_database": project.database, + "target_schema": project.test_schema, + "unique_key": "id", + "strategy": "timestamp", + "updated_at": "updated_at", + "full_refresh": None, + "database": None, + "schema": None, + "alias": None, + "check_cols": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + }, + "unique_id": "snapshot.test.my_snapshot", + "original_file_path": normalize("snapshots/snapshot.sql"), + "alias": "my_snapshot", + "resource_type": "snapshot", + }, + "path": self.dir("snapshots/snapshot.sql"), + } + self.expect_given_output(["--resource-type", "snapshot"], expectations) + + def expect_analyses_output(self): + expectations = { + "name": "a", + "selector": "test.analysis.a", + "json": { + "name": "a", + "package_name": "test", + "depends_on": {"nodes": [], "macros": []}, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "unique_key": None, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + }, + "unique_id": "analysis.test.a", + "original_file_path": normalize("analyses/a.sql"), + "alias": "a", + "resource_type": "analysis", + }, + "path": self.dir("analyses/a.sql"), + } + self.expect_given_output(["--resource-type", "analysis"], expectations) + + def expect_model_output(self): + expectations = { + "name": ("ephemeral", "incremental", "inner", "metricflow_time_spine", "outer"), + "selector": ( + "test.ephemeral", + "test.incremental", + "test.sub.inner", + "test.metricflow_time_spine", + "test.outer", + ), + "json": ( + { + "name": "ephemeral", + "package_name": "test", + "depends_on": { + "nodes": [], + "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "ephemeral", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + }, + "original_file_path": normalize("models/ephemeral.sql"), + "unique_id": "model.test.ephemeral", + "alias": "ephemeral", + "resource_type": "model", + }, + { + "name": "incremental", + "package_name": "test", + "depends_on": { + "nodes": ["seed.test.seed"], + "macros": ["macro.dbt.is_incremental"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "incremental", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": "delete+insert", + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + }, + "original_file_path": normalize("models/incremental.sql"), + "unique_id": "model.test.incremental", + "alias": "incremental", + "resource_type": "model", + }, + { + "name": "inner", + "package_name": "test", + "depends_on": { + "nodes": ["model.test.outer"], + "macros": [], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + }, + "original_file_path": normalize("models/sub/inner.sql"), + "unique_id": "model.test.inner", + "alias": "inner", + "resource_type": "model", + }, + { + "name": "metricflow_time_spine", + "package_name": "test", + "depends_on": { + "nodes": [], + "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + }, + "original_file_path": normalize("models/metricflow_time_spine.sql"), + "unique_id": "model.test.metricflow_time_spine", + "alias": "metricflow_time_spine", + "resource_type": "model", + }, + { + "name": "outer", + "package_name": "test", + "depends_on": { + "nodes": ["model.test.ephemeral"], + "macros": [], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + }, + "original_file_path": normalize("models/outer.sql"), + "unique_id": "model.test.outer", + "alias": "outer", + "resource_type": "model", + }, + ), + "path": ( + self.dir("models/ephemeral.sql"), + self.dir("models/incremental.sql"), + self.dir("models/sub/inner.sql"), + self.dir("models/metricflow_time_spine.sql"), + self.dir("models/outer.sql"), + ), + } + self.expect_given_output(["--resource-type", "model"], expectations) + + # Do not include ephemeral model - it was not selected + def expect_model_ephemeral_output(self): + expectations = { + "name": ("outer"), + "selector": ("test.outer"), + "json": ( + { + "name": "outer", + "package_name": "test", + "depends_on": {"nodes": [], "macros": []}, + "tags": [], + "config": { + "enabled": True, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "access": "protected", + }, + "unique_id": "model.test.ephemeral", + "original_file_path": normalize("models/ephemeral.sql"), + "alias": "outer", + "resource_type": "model", + }, + ), + "path": (self.dir("models/outer.sql"),), + } + self.expect_given_output(["--model", "outer"], expectations) + + def expect_source_output(self): + expectations = { + "name": "my_source.my_table", + "selector": "source:test.my_source.my_table", + "json": { + "config": { + "enabled": True, + }, + "unique_id": "source.test.my_source.my_table", + "original_file_path": normalize("models/schema.yml"), + "package_name": "test", + "name": "my_table", + "source_name": "my_source", + "resource_type": "source", + "tags": [], + }, + "path": self.dir("models/schema.yml"), + } + # should we do this --select automatically for a user if if 'source' is + # in the resource types and there is no '--select' or '--exclude'? + self.expect_given_output( + ["--resource-type", "source", "--select", "source:*"], expectations + ) + + def expect_seed_output(self): + expectations = { + "name": "seed", + "selector": "test.seed", + "json": { + "name": "seed", + "package_name": "test", + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "seed", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "delimiter": ",", + "persist_docs": {}, + "quote_columns": False, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + }, + "depends_on": {"macros": []}, + "unique_id": "seed.test.seed", + "original_file_path": normalize("seeds/seed.csv"), + "alias": "seed", + "resource_type": "seed", + }, + "path": self.dir("seeds/seed.csv"), + } + self.expect_given_output(["--resource-type", "seed"], expectations) + + def expect_test_output(self): + expectations = { + "name": ("not_null_outer_id", "t", "unique_outer_id"), + "selector": ("test.not_null_outer_id", "test.t", "test.unique_outer_id"), + "json": ( + { + "name": "not_null_outer_id", + "package_name": "test", + "depends_on": { + "nodes": ["model.test.outer"], + "macros": ["macro.dbt.test_not_null"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "test", + "severity": "ERROR", + "store_failures": None, + "store_failures_as": None, + "warn_if": "!= 0", + "error_if": "!= 0", + "fail_calc": "count(*)", + "where": None, + "limit": None, + "tags": [], + "database": None, + "schema": "dbt_test__audit", + "alias": None, + "meta": {}, + }, + "unique_id": "test.test.not_null_outer_id.a226f4fb36", + "original_file_path": normalize("models/schema.yml"), + "alias": "not_null_outer_id", + "resource_type": "test", + }, + { + "name": "t", + "package_name": "test", + "depends_on": {"nodes": [], "macros": []}, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "test", + "severity": "ERROR", + "store_failures": None, + "store_failures_as": None, + "warn_if": "!= 0", + "error_if": "!= 0", + "fail_calc": "count(*)", + "where": None, + "limit": None, + "tags": [], + "database": None, + "schema": "dbt_test__audit", + "alias": None, + "meta": {}, + }, + "unique_id": "test.test.t", + "original_file_path": normalize("tests/t.sql"), + "alias": "t", + "resource_type": "test", + }, + { + "name": "unique_outer_id", + "package_name": "test", + "depends_on": { + "nodes": ["model.test.outer"], + "macros": ["macro.dbt.test_unique"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "test", + "severity": "ERROR", + "store_failures": None, + "store_failures_as": None, + "warn_if": "!= 0", + "error_if": "!= 0", + "fail_calc": "count(*)", + "where": None, + "limit": None, + "tags": [], + "database": None, + "schema": "dbt_test__audit", + "alias": None, + "meta": {}, + }, + "unique_id": "test.test.unique_outer_id.2195e332d3", + "original_file_path": normalize("models/schema.yml"), + "alias": "unique_outer_id", + "resource_type": "test", + }, + ), + "path": ( + self.dir("models/schema.yml"), + self.dir("tests/t.sql"), + self.dir("models/schema.yml"), + ), + } + self.expect_given_output(["--resource-type", "test"], expectations) + + def expect_all_output(self): + # generic test FQNS include the resource + column they're defined on + # models are just package, subdirectory path, name + # sources are like models, ending in source_name.table_name + expected_default = { + "test.ephemeral", + "test.incremental", + "test.snapshot.my_snapshot", + "test.sub.inner", + "test.outer", + "test.seed", + "source:test.my_source.my_table", + "test.not_null_outer_id", + "test.unique_outer_id", + "test.metricflow_time_spine", + "test.t", + "semantic_model:test.my_sm", + "metric:test.total_outer", + } + # analyses have their type inserted into their fqn like tests + expected_all = expected_default | {"test.analysis.a"} + + results = self.run_dbt_ls(["--resource-type", "all", "--select", "*", "source:*"]) + assert set(results) == expected_all + + results = self.run_dbt_ls(["--select", "*", "source:*"]) + assert set(results) == expected_default + + results = self.run_dbt_ls(["--resource-type", "default", "--select", "*", "source:*"]) + assert set(results) == expected_default + + results = self.run_dbt_ls + + def expect_select(self): + results = self.run_dbt_ls(["--resource-type", "test", "--select", "outer"]) + assert set(results) == {"test.not_null_outer_id", "test.unique_outer_id"} + + self.run_dbt_ls(["--resource-type", "test", "--select", "inner"], expect_pass=True) + + results = self.run_dbt_ls(["--resource-type", "test", "--select", "+inner"]) + assert set(results) == {"test.not_null_outer_id", "test.unique_outer_id"} + + results = self.run_dbt_ls(["--resource-type", "semantic_model"]) + assert set(results) == {"semantic_model:test.my_sm"} + + results = self.run_dbt_ls(["--resource-type", "metric"]) + assert set(results) == {"metric:test.total_outer"} + + results = self.run_dbt_ls(["--resource-type", "model", "--select", "outer+"]) + assert set(results) == {"test.outer", "test.sub.inner"} + + results = self.run_dbt_ls(["--resource-type", "model", "--exclude", "inner"]) + assert set(results) == { + "test.ephemeral", + "test.outer", + "test.metricflow_time_spine", + "test.incremental", + } + + results = self.run_dbt_ls(["--select", "config.incremental_strategy:delete+insert"]) + assert set(results) == {"test.incremental"} + + self.run_dbt_ls( + ["--select", "config.incremental_strategy:insert_overwrite"], expect_pass=True + ) + + def expect_resource_type_multiple(self): + """Expect selected resources when --resource-type given multiple times""" + results = self.run_dbt_ls(["--resource-type", "test", "--resource-type", "model"]) + assert set(results) == { + "test.ephemeral", + "test.incremental", + "test.not_null_outer_id", + "test.outer", + "test.sub.inner", + "test.metricflow_time_spine", + "test.t", + "test.unique_outer_id", + } + + results = self.run_dbt_ls( + [ + "--resource-type", + "test", + "--resource-type", + "model", + "--exclude", + "unique_outer_id", + ] + ) + assert set(results) == { + "test.ephemeral", + "test.incremental", + "test.not_null_outer_id", + "test.outer", + "test.metricflow_time_spine", + "test.sub.inner", + "test.t", + } + + results = self.run_dbt_ls( + [ + "--resource-type", + "test", + "--resource-type", + "model", + "--select", + "+inner", + "outer+", + "--exclude", + "inner", + ] + ) + assert set(results) == { + "test.ephemeral", + "test.not_null_outer_id", + "test.unique_outer_id", + "test.outer", + } + + def expect_selected_keys(self, project): + """Expect selected fields of the the selected model""" + expectations = [ + {"database": project.database, "schema": project.test_schema, "alias": "inner"} + ] + results = self.run_dbt_ls( + [ + "--model", + "inner", + "--output", + "json", + "--output-keys", + "database", + "schema", + "alias", + ] + ) + assert len(results) == len(expectations) + + for got, expected in zip(results, expectations): + self.assert_json_equal(got, expected) + + """Expect selected fields when --output-keys given multiple times + """ + expectations = [{"database": project.database, "schema": project.test_schema}] + results = self.run_dbt_ls( + [ + "--model", + "inner", + "--output", + "json", + "--output-keys", + "database", + "--output-keys", + "schema", + ] + ) + assert len(results) == len(expectations) + + for got, expected in zip(results, expectations): + self.assert_json_equal(got, expected) + + """Expect selected fields of the test resource types + """ + expectations = [ + {"name": "not_null_outer_id", "column_name": "id"}, + {"name": "t"}, + {"name": "unique_outer_id", "column_name": "id"}, + ] + results = self.run_dbt_ls( + [ + "--resource-type", + "test", + "--output", + "json", + "--output-keys", + "name", + "column_name", + ] + ) + assert len(results) == len(expectations) + + for got, expected in zip( + sorted(results, key=lambda x: json.loads(x).get("name")), + sorted(expectations, key=lambda x: x.get("name")), + ): + self.assert_json_equal(got, expected) + + """Expect nothing (non-existent keys) for the selected models + """ + expectations = [{}, {}] + results = self.run_dbt_ls( + [ + "--model", + "inner outer", + "--output", + "json", + "--output-keys", + "non_existent_key", + ] + ) + assert len(results) == len(expectations) + + for got, expected in zip(results, expectations): + self.assert_json_equal(got, expected) + + def test_ls(self, project): + self.expect_snapshot_output(project) + self.expect_analyses_output() + self.expect_model_output() + self.expect_source_output() + self.expect_seed_output() + self.expect_test_output() + self.expect_select() + self.expect_resource_type_multiple() + self.expect_all_output() + self.expect_selected_keys(project) + + +def normalize(path): + """On windows, neither is enough on its own: + >>> normcase('C:\\documents/ALL CAPS/subdir\\..') + 'c:\\documents\\all caps\\subdir\\..' + >>> normpath('C:\\documents/ALL CAPS/subdir\\..') + 'C:\\documents\\ALL CAPS' + >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) + 'c:\\documents\\all caps' + """ + return os.path.normcase(os.path.normpath(path)) diff --git a/tests/functional/logging/test_logging.py b/tests/functional/logging/test_logging.py new file mode 100644 index 000000000..a7e226eb3 --- /dev/null +++ b/tests/functional/logging/test_logging.py @@ -0,0 +1,98 @@ +import json +import os + +from dbt.events.types import InvalidOptionYAML +from dbt.tests.util import get_manifest, run_dbt, read_file +from dbt_common.events.functions import fire_event +import pytest + + +@pytest.fixture(scope="class") +def models(): + return {"my_model.sql": "select 1 as fun"} + + +# This test checks that various events contain node_info, +# which is supplied by the log_contextvars context manager +def test_basic(project, logs_dir): + results = run_dbt(["--log-format=json", "run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + assert "model.test.my_model" in manifest.nodes + + # get log file + log_file = read_file(logs_dir, "dbt.log") + assert log_file + node_start = False + node_finished = False + connection_reused_data = [] + for log_line in log_file.split("\n"): + # skip empty lines + if len(log_line) == 0: + continue + # The adapter logging also shows up, so skip non-json lines + if "[debug]" in log_line: + continue + log_dct = json.loads(log_line) + log_data = log_dct["data"] + log_event = log_dct["info"]["name"] + if log_event == "ConnectionReused": + connection_reused_data.append(log_data) + if log_event == "NodeStart": + node_start = True + if log_event == "NodeFinished": + node_finished = True + assert log_data["run_result"]["adapter_response"] + if node_start and not node_finished: + if log_event == "NodeExecuting": + assert "node_info" in log_data + if log_event == "JinjaLogDebug": + assert "node_info" in log_data + if log_event == "SQLQuery": + assert "node_info" in log_data + if log_event == "TimingInfoCollected": + assert "node_info" in log_data + assert "timing_info" in log_data + + # windows doesn't have the same thread/connection flow so the ConnectionReused + # events don't show up + if os.name != "nt": + # Verify the ConnectionReused event occurs and has the right data + assert connection_reused_data + for data in connection_reused_data: + assert "conn_name" in data and data["conn_name"] + assert "orig_conn_name" in data and data["orig_conn_name"] + + +def test_formatted_logs(project, logs_dir): + # a basic run of dbt with a single model should have 5 `Formatting` events in the json logs + results = run_dbt(["--log-format=json", "run"]) + assert len(results) == 1 + + # get log file + json_log_file = read_file(logs_dir, "dbt.log") + formatted_json_lines = 0 + for log_line in json_log_file.split("\n"): + # skip the empty line at the end + if len(log_line) == 0: + continue + log_dct = json.loads(log_line) + log_event = log_dct["info"]["name"] + if log_event == "Formatting": + formatted_json_lines += 1 + + assert formatted_json_lines == 5 + + +def test_invalid_event_value(project, logs_dir): + results = run_dbt(["--log-format=json", "run"]) + assert len(results) == 1 + with pytest.raises(Exception): + # This should raise because positional arguments are provided to the event + fire_event(InvalidOptionYAML("testing")) + + # Provide invalid type to "option_name" + with pytest.raises(Exception) as excinfo: + fire_event(InvalidOptionYAML(option_name=1)) + + assert str(excinfo.value) == "[InvalidOptionYAML]: Unable to parse dict {'option_name': 1}" diff --git a/tests/functional/logging/test_meta_logging.py b/tests/functional/logging/test_meta_logging.py new file mode 100644 index 000000000..7c535bce7 --- /dev/null +++ b/tests/functional/logging/test_meta_logging.py @@ -0,0 +1,46 @@ +import json + +from dbt.tests.util import read_file, run_dbt +import pytest + + +model1 = "select 1 as fun" +model2 = '{{ config(meta={"owners": ["team1", "team2"]})}} select 1 as fun' +model3 = '{{ config(meta={"key": 1})}} select 1 as fun' + + +@pytest.fixture(scope="class") # noqa +def models(): + return {"model1.sql": model1, "model2.sql": model2, "model3.sql": model3} + + +# This test checks that various events contain node_info, +# which is supplied by the log_contextvars context manager +def test_meta(project, logs_dir): + run_dbt(["--log-format=json", "run"]) + + # get log file + log_file = read_file(logs_dir, "dbt.log") + assert log_file + + for log_line in log_file.split("\n"): + # skip empty lines + if len(log_line) == 0: + continue + # The adapter logging also shows up, so skip non-json lines + if "[debug]" in log_line: + continue + + log_dct = json.loads(log_line) + if "node_info" not in log_dct["data"]: + continue + + print(f"--- log_dct: {log_dct}") + node_info = log_dct["data"]["node_info"] + node_path = node_info["node_path"] + if node_path == "model1.sql": + assert node_info["meta"] == {} + elif node_path == "model2.sql": + assert node_info["meta"] == {"owners": ["team1", "team2"]} + elif node_path == "model3.sql": + assert node_info["meta"] == {"key": 1} diff --git a/tests/functional/macros/data/seed.sql b/tests/functional/macros/data/seed.sql new file mode 100644 index 000000000..21d96c73d --- /dev/null +++ b/tests/functional/macros/data/seed.sql @@ -0,0 +1,23 @@ +create table {schema}.expected_dep_macro ( + foo TEXT, + bar TEXT +); + +create table {schema}.expected_local_macro ( + foo2 TEXT, + bar2 TEXT +); + +create table {schema}.seed ( + id integer, + updated_at timestamp +); + +insert into {schema}.expected_dep_macro (foo, bar) +values ('arg1', 'arg2'); + +insert into {schema}.expected_local_macro (foo2, bar2) +values ('arg1', 'arg2'), ('arg3', 'arg4'); + +insert into {schema}.seed (id, updated_at) +values (1, '2017-01-01'), (2, '2017-01-02'); diff --git a/tests/functional/macros/fixtures.py b/tests/functional/macros/fixtures.py new file mode 100644 index 000000000..77de40951 --- /dev/null +++ b/tests/functional/macros/fixtures.py @@ -0,0 +1,168 @@ +models__dep_macro = """ +{{ + dbt_integration_project.do_something("arg1", "arg2") +}} +""" + +models__materialization_macro = """ +{{ + materialization_macro() +}} +""" + +models__with_undefined_macro = """ +{{ dispatch_to_nowhere() }} +select 1 as id +""" + +models__local_macro = """ +{{ + do_something2("arg1", "arg2") +}} + +union all + +{{ + test.do_something2("arg3", "arg4") +}} +""" + +models__ref_macro = """ +select * from {{ with_ref() }} +""" + +models__override_get_columns_macros = """ +{% set result = adapter.get_columns_in_relation(this) %} +{% if execute and result != 'a string' %} + {% do exceptions.raise_compiler_error('overriding get_columns_in_relation failed') %} +{% endif %} +select 1 as id +""" + +models__deprecated_adapter_macro_model = """ +{% if some_macro('foo', 'bar') != 'foobar' %} + {% do exceptions.raise_compiler_error('invalid foobar') %} +{% endif %} +select 1 as id +""" + +# +# Macros +# +macros__my_macros = """ +{% macro do_something2(foo2, bar2) %} + + select + '{{ foo2 }}' as foo2, + '{{ bar2 }}' as bar2 + +{% endmacro %} + + +{% macro with_ref() %} + + {{ ref('table_model') }} + +{% endmacro %} + + +{% macro dispatch_to_parent() %} + {% set macro = adapter.dispatch('dispatch_to_parent') %} + {{ macro() }} +{% endmacro %} + +{% macro default__dispatch_to_parent() %} + {% set msg = 'No default implementation of dispatch_to_parent' %} + {{ exceptions.raise_compiler_error(msg) }} +{% endmacro %} + +{% macro postgres__dispatch_to_parent() %} + {{ return('') }} +{% endmacro %} +""" + +macros__named_materialization = """ +{% macro materialization_macro() %} + select 1 as foo +{% endmacro %} +""" + +macros__no_default_macros = """ +{% macro do_something2(foo2, bar2) %} + + select + '{{ foo2 }}' as foo2, + '{{ bar2 }}' as bar2 + +{% endmacro %} + + +{% macro with_ref() %} + + {{ ref('table_model') }} + +{% endmacro %} + +{# there is no default__dispatch_to_nowhere! #} +{% macro dispatch_to_nowhere() %} + {% set macro = adapter.dispatch('dispatch_to_nowhere') %} + {{ macro() }} +{% endmacro %} + +{% macro dispatch_to_parent() %} + {% set macro = adapter.dispatch('dispatch_to_parent') %} + {{ macro() }} +{% endmacro %} + +{% macro default__dispatch_to_parent() %} + {% set msg = 'No default implementation of dispatch_to_parent' %} + {{ exceptions.raise_compiler_error(msg) }} +{% endmacro %} + +{% macro postgres__dispatch_to_parent() %} + {{ return('') }} +{% endmacro %} +""" + +macros__override_get_columns_macros = """ +{% macro get_columns_in_relation(relation) %} + {{ return('a string') }} +{% endmacro %} +""" + +macros__package_override_get_columns_macros = """ +{% macro postgres__get_columns_in_relation(relation) %} + {{ return('a string') }} +{% endmacro %} +""" + +macros__deprecated_adapter_macro = """ +{% macro some_macro(arg1, arg2) -%} + {{ adapter_macro('some_macro', arg1, arg2) }} +{%- endmacro %} +""" + +macros__incorrect_dispatch = """ +{% macro cowsay() %} + {{ return(adapter.dispatch('cowsay', 'farm_utils')()) }} +{%- endmacro %} + +{% macro default__cowsay() %} + 'moo' +{% endmacro %} +""" + +# Note the difference between `test_utils` below and `farm_utils` above +models__incorrect_dispatch = """ +select {{ test_utils.cowsay() }} as cowsay +""" + +dbt_project__incorrect_dispatch = """ +name: 'test_utils' +version: '1.0' +config-version: 2 + +profile: 'default' + +macro-paths: ["macros"] +""" diff --git a/tests/functional/macros/package_macro_overrides/dbt_project.yml b/tests/functional/macros/package_macro_overrides/dbt_project.yml new file mode 100644 index 000000000..bcf5c9285 --- /dev/null +++ b/tests/functional/macros/package_macro_overrides/dbt_project.yml @@ -0,0 +1,7 @@ +name: 'package_macro_overrides' +version: '1.0' +config-version: 2 + +profile: 'default' + +macro-paths: ["macros"] diff --git a/tests/functional/macros/package_macro_overrides/macros/macros.sql b/tests/functional/macros/package_macro_overrides/macros/macros.sql new file mode 100644 index 000000000..73fe0ccfb --- /dev/null +++ b/tests/functional/macros/package_macro_overrides/macros/macros.sql @@ -0,0 +1,3 @@ +{% macro get_columns_in_relation(relation) %} + {{ return('a string') }} +{% endmacro %} diff --git a/tests/functional/macros/test_macros.py b/tests/functional/macros/test_macros.py new file mode 100644 index 000000000..4000f68f9 --- /dev/null +++ b/tests/functional/macros/test_macros.py @@ -0,0 +1,272 @@ +from pathlib import Path +import shutil + +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_relations_equal, run_dbt +import dbt_common.exceptions +import pytest + +from tests.functional.macros.fixtures import ( + dbt_project__incorrect_dispatch, + macros__deprecated_adapter_macro, + macros__incorrect_dispatch, + macros__my_macros, + macros__named_materialization, + macros__no_default_macros, + macros__override_get_columns_macros, + macros__package_override_get_columns_macros, + models__dep_macro, + models__deprecated_adapter_macro_model, + models__incorrect_dispatch, + models__local_macro, + models__materialization_macro, + models__override_get_columns_macros, + models__ref_macro, + models__with_undefined_macro, +) + + +class TestMacros: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(project.test_data_dir / Path("seed.sql")) + + @pytest.fixture(scope="class") + def models(self): + return { + "dep_macro.sql": models__dep_macro, + "local_macro.sql": models__local_macro, + "ref_macro.sql": models__ref_macro, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"my_macros.sql": macros__my_macros} + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-integration-project", + "revision": "dbt/1.0.0", + }, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "vars": { + "test": { + "test": "DUMMY", + }, + }, + "macro-paths": ["macros"], + } + + def test_working_macros(self, project): + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 6 + + check_relations_equal(project.adapter, ["expected_dep_macro", "dep_macro"]) + check_relations_equal(project.adapter, ["expected_local_macro", "local_macro"]) + + +class TestMacrosNamedMaterialization: + @pytest.fixture(scope="class") + def models(self): + return { + "models_materialization_macro.sql": models__materialization_macro, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"macros_named_materialization.sql": macros__named_materialization} + + def test_macro_with_materialization_in_name_works(self, project): + run_dbt(expect_pass=True) + + +class TestInvalidMacros: + @pytest.fixture(scope="class") + def models(self): + return { + "dep_macro.sql": models__dep_macro, + "local_macro.sql": models__local_macro, + "ref_macro.sql": models__ref_macro, + } + + def test_invalid_macro(self, project): + run_dbt(expect_pass=False) + + +class TestAdapterMacroNoDestination: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models__with_undefined_macro} + + @pytest.fixture(scope="class") + def macros(self): + return {"my_macros.sql": macros__no_default_macros} + + def test_invalid_macro(self, project): + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: + run_dbt() + + assert "In dispatch: No macro named 'dispatch_to_nowhere' found" in str(exc.value) + + +class TestMacroOverrideBuiltin: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models__override_get_columns_macros} + + @pytest.fixture(scope="class") + def macros(self): + return {"macros.sql": macros__override_get_columns_macros} + + def test_overrides(self, project): + # the first time, the model doesn't exist + run_dbt() + run_dbt() + + +class TestMacroOverridePackage: + """ + The macro in `override-postgres-get-columns-macros` should override the + `get_columns_in_relation` macro by default. + """ + + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models__override_get_columns_macros} + + @pytest.fixture(scope="class") + def macros(self): + return {"macros.sql": macros__package_override_get_columns_macros} + + def test_overrides(self, project): + # the first time, the model doesn't exist + run_dbt() + run_dbt() + + +class TestMacroNotOverridePackage: + """ + The macro in `override-postgres-get-columns-macros` does NOT override the + `get_columns_in_relation` macro because we tell dispatch to not look at the + postgres macros. + """ + + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models__override_get_columns_macros} + + @pytest.fixture(scope="class") + def macros(self): + return {"macros.sql": macros__package_override_get_columns_macros} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "dispatch": [{"macro_namespace": "dbt", "search_order": ["dbt"]}], + } + + def test_overrides(self, project): + # the first time, the model doesn't exist + run_dbt(expect_pass=False) + run_dbt(expect_pass=False) + + +class TestDispatchMacroOverrideBuiltin(TestMacroOverrideBuiltin): + # test the same functionality as above, but this time, + # dbt.get_columns_in_relation will dispatch to a default__ macro + # from an installed package, per dispatch config search_order + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + shutil.copytree( + project.test_dir / Path("package_macro_overrides"), + project.project_root / Path("package_macro_overrides"), + ) + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "dispatch": [ + { + "macro_namespace": "dbt", + "search_order": ["test", "package_macro_overrides", "dbt"], + } + ], + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "local": "./package_macro_overrides", + }, + ] + } + + def test_overrides(self, project): + run_dbt(["deps"]) + run_dbt() + run_dbt() + + +class TestMisnamedMacroNamespace: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + test_utils_files = { + "dbt_project.yml": dbt_project__incorrect_dispatch, + "macros": { + "cowsay.sql": macros__incorrect_dispatch, + }, + } + write_project_files(project_root, "test_utils", test_utils_files) + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": models__incorrect_dispatch, + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + {"local": "test_utils"}, + ] + } + + def test_misnamed_macro_namespace( + self, + project, + ): + run_dbt(["deps"]) + + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: + run_dbt() + + assert "In dispatch: No macro named 'cowsay' found" in str(exc.value) + + +class TestAdapterMacroDeprecated: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models__deprecated_adapter_macro_model} + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": macros__deprecated_adapter_macro} + + def test_invalid_macro(self, project): + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: + run_dbt() + + assert 'The "adapter_macro" macro has been deprecated' in str(exc.value) diff --git a/tests/functional/materializations/conftest.py b/tests/functional/materializations/conftest.py new file mode 100644 index 000000000..0e29055be --- /dev/null +++ b/tests/functional/materializations/conftest.py @@ -0,0 +1,370 @@ +from dbt.tests.fixtures.project import write_project_files +import pytest + + +override_view_adapter_pass_dep__dbt_project_yml = """ +name: view_adapter_override +version: '1.0' +macro-paths: ['macros'] +config-version: 2 + +""" + +override_view_adapter_pass_dep__macros__override_view_sql = """ +{# copy+pasting the default view impl #} +{% materialization view, default %} + + {%- set identifier = model['alias'] -%} + {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%} + {%- set backup_identifier = model['name'] + '__dbt_backup' -%} + + {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} + {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database, + type='view') -%} + {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier, + schema=schema, database=database, type='view') -%} + + /* + This relation (probably) doesn't exist yet. If it does exist, it's a leftover from + a previous run, and we're going to try to drop it immediately. At the end of this + materialization, we're going to rename the "old_relation" to this identifier, + and then we're going to drop it. In order to make sure we run the correct one of: + - drop view ... + - drop table ... + + We need to set the type of this relation to be the type of the old_relation, if it exists, + or else "view" as a sane default if it does not. Note that if the old_relation does not + exist, then there is nothing to move out of the way and subsequentally drop. In that case, + this relation will be effectively unused. + */ + {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%} + {%- set backup_relation = api.Relation.create(identifier=backup_identifier, + schema=schema, database=database, + type=backup_relation_type) -%} + + {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} + + {{ run_hooks(pre_hooks, inside_transaction=False) }} + + -- drop the temp relations if they exists for some reason + {{ adapter.drop_relation(intermediate_relation) }} + {{ adapter.drop_relation(backup_relation) }} + + -- `BEGIN` happens here: + {{ run_hooks(pre_hooks, inside_transaction=True) }} + + -- build model + {% call statement('main') -%} + {{ create_view_as(intermediate_relation, sql) }} + {%- endcall %} + + -- cleanup + -- move the existing view out of the way + {% if old_relation is not none %} + {{ adapter.rename_relation(target_relation, backup_relation) }} + {% endif %} + {{ adapter.rename_relation(intermediate_relation, target_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=True) }} + + {{ adapter.commit() }} + + {{ drop_relation_if_exists(backup_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=False) }} + + {{ return({'relations': [target_relation]}) }} + +{%- endmaterialization -%} + +""" + +override_view_adapter_macros__override_view_sql = """ +{%- materialization view, adapter='postgres' -%} +{{ exceptions.raise_compiler_error('intentionally raising an error in the postgres view materialization') }} +{%- endmaterialization -%} + +{# copy+pasting the default view impl #} +{% materialization view, default %} + + {%- set identifier = model['alias'] -%} + {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%} + {%- set backup_identifier = model['name'] + '__dbt_backup' -%} + + {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} + {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database, + type='view') -%} + {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier, + schema=schema, database=database, type='view') -%} + + /* + This relation (probably) doesn't exist yet. If it does exist, it's a leftover from + a previous run, and we're going to try to drop it immediately. At the end of this + materialization, we're going to rename the "old_relation" to this identifier, + and then we're going to drop it. In order to make sure we run the correct one of: + - drop view ... + - drop table ... + + We need to set the type of this relation to be the type of the old_relation, if it exists, + or else "view" as a sane default if it does not. Note that if the old_relation does not + exist, then there is nothing to move out of the way and subsequentally drop. In that case, + this relation will be effectively unused. + */ + {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%} + {%- set backup_relation = api.Relation.create(identifier=backup_identifier, + schema=schema, database=database, + type=backup_relation_type) -%} + + {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} + + {{ run_hooks(pre_hooks, inside_transaction=False) }} + + -- drop the temp relations if they exists for some reason + {{ adapter.drop_relation(intermediate_relation) }} + {{ adapter.drop_relation(backup_relation) }} + + -- `BEGIN` happens here: + {{ run_hooks(pre_hooks, inside_transaction=True) }} + + -- build model + {% call statement('main') -%} + {{ create_view_as(intermediate_relation, sql) }} + {%- endcall %} + + -- cleanup + -- move the existing view out of the way + {% if old_relation is not none %} + {{ adapter.rename_relation(target_relation, backup_relation) }} + {% endif %} + {{ adapter.rename_relation(intermediate_relation, target_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=True) }} + + {{ adapter.commit() }} + + {{ drop_relation_if_exists(backup_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=False) }} + + {{ return({'relations': [target_relation]}) }} + +{%- endmaterialization -%} + +""" + +override_view_adapter_dep__dbt_project_yml = """ +name: view_adapter_override +version: '1.0' +macro-paths: ['macros'] +config-version: 2 + +""" + +override_view_adapter_dep__macros__override_view_sql = """ +{%- materialization view, adapter='postgres' -%} +{{ exceptions.raise_compiler_error('intentionally raising an error in the postgres view materialization') }} +{%- endmaterialization -%} + +{# copy+pasting the default view impl #} +{% materialization view, default %} + + {%- set identifier = model['alias'] -%} + {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%} + {%- set backup_identifier = model['name'] + '__dbt_backup' -%} + + {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} + {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database, + type='view') -%} + {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier, + schema=schema, database=database, type='view') -%} + + /* + This relation (probably) doesn't exist yet. If it does exist, it's a leftover from + a previous run, and we're going to try to drop it immediately. At the end of this + materialization, we're going to rename the "old_relation" to this identifier, + and then we're going to drop it. In order to make sure we run the correct one of: + - drop view ... + - drop table ... + + We need to set the type of this relation to be the type of the old_relation, if it exists, + or else "view" as a sane default if it does not. Note that if the old_relation does not + exist, then there is nothing to move out of the way and subsequentally drop. In that case, + this relation will be effectively unused. + */ + {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%} + {%- set backup_relation = api.Relation.create(identifier=backup_identifier, + schema=schema, database=database, + type=backup_relation_type) -%} + + {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} + + {{ run_hooks(pre_hooks, inside_transaction=False) }} + + -- drop the temp relations if they exists for some reason + {{ adapter.drop_relation(intermediate_relation) }} + {{ adapter.drop_relation(backup_relation) }} + + -- `BEGIN` happens here: + {{ run_hooks(pre_hooks, inside_transaction=True) }} + + -- build model + {% call statement('main') -%} + {{ create_view_as(intermediate_relation, sql) }} + {%- endcall %} + + -- cleanup + -- move the existing view out of the way + {% if old_relation is not none %} + {{ adapter.rename_relation(target_relation, backup_relation) }} + {% endif %} + {{ adapter.rename_relation(intermediate_relation, target_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=True) }} + + {{ adapter.commit() }} + + {{ drop_relation_if_exists(backup_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=False) }} + + {{ return({'relations': [target_relation]}) }} + +{%- endmaterialization -%} + +""" + +override_view_default_dep__dbt_project_yml = """ +name: view_default_override +config-version: 2 +version: '1.0' +macro-paths: ['macros'] + +""" + +override_view_default_dep__macros__default_view_sql = """ +{%- materialization view, default -%} +{{ exceptions.raise_compiler_error('intentionally raising an error in the default view materialization') }} +{%- endmaterialization -%} + +""" + +override_view_return_no_relation__dbt_project_yml = """ +name: view_adapter_override +version: 2 +macro-paths: ['macros'] +config-version: 2 + +""" + +override_view_return_no_relation__macros__override_view_sql = """ +{# copy+pasting the default view impl #} +{% materialization view, default %} + + {%- set identifier = model['alias'] -%} + {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%} + {%- set backup_identifier = model['name'] + '__dbt_backup' -%} + + {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} + {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database, + type='view') -%} + {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier, + schema=schema, database=database, type='view') -%} + + /* + This relation (probably) doesn't exist yet. If it does exist, it's a leftover from + a previous run, and we're going to try to drop it immediately. At the end of this + materialization, we're going to rename the "old_relation" to this identifier, + and then we're going to drop it. In order to make sure we run the correct one of: + - drop view ... + - drop table ... + + We need to set the type of this relation to be the type of the old_relation, if it exists, + or else "view" as a sane default if it does not. Note that if the old_relation does not + exist, then there is nothing to move out of the way and subsequentally drop. In that case, + this relation will be effectively unused. + */ + {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%} + {%- set backup_relation = api.Relation.create(identifier=backup_identifier, + schema=schema, database=database, + type=backup_relation_type) -%} + + {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} + + {{ run_hooks(pre_hooks, inside_transaction=False) }} + + -- drop the temp relations if they exists for some reason + {{ adapter.drop_relation(intermediate_relation) }} + {{ adapter.drop_relation(backup_relation) }} + + -- `BEGIN` happens here: + {{ run_hooks(pre_hooks, inside_transaction=True) }} + + -- build model + {% call statement('main') -%} + {{ create_view_as(intermediate_relation, sql) }} + {%- endcall %} + + -- cleanup + -- move the existing view out of the way + {% if old_relation is not none %} + {{ adapter.rename_relation(target_relation, backup_relation) }} + {% endif %} + {{ adapter.rename_relation(intermediate_relation, target_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=True) }} + + {{ adapter.commit() }} + + {{ drop_relation_if_exists(backup_relation) }} + + {{ run_hooks(post_hooks, inside_transaction=False) }} + + {# do not return anything! #} + {# {{ return({'relations': [target_relation]}) }} #} + +{%- endmaterialization -%} +""" + + +@pytest.fixture(scope="class") +def override_view_adapter_pass_dep(project_root): + files = { + "dbt_project.yml": override_view_adapter_pass_dep__dbt_project_yml, + "macros": {"override_view.sql": override_view_adapter_pass_dep__macros__override_view_sql}, + } + write_project_files(project_root, "override-view-adapter-pass-dep", files) + + +@pytest.fixture(scope="class") +def override_view_adapter_macros(project_root): + files = {"override_view.sql": override_view_adapter_macros__override_view_sql} + write_project_files(project_root, "override-view-adapter-macros", files) + + +@pytest.fixture(scope="class") +def override_view_adapter_dep(project_root): + files = { + "dbt_project.yml": override_view_adapter_dep__dbt_project_yml, + "macros": {"override_view.sql": override_view_adapter_dep__macros__override_view_sql}, + } + write_project_files(project_root, "override-view-adapter-dep", files) + + +@pytest.fixture(scope="class") +def override_view_default_dep(project_root): + files = { + "dbt_project.yml": override_view_default_dep__dbt_project_yml, + "macros": {"default_view.sql": override_view_default_dep__macros__default_view_sql}, + } + write_project_files(project_root, "override-view-default-dep", files) + + +@pytest.fixture(scope="class") +def override_view_return_no_relation(project_root): + files = { + "dbt_project.yml": override_view_return_no_relation__dbt_project_yml, + "macros": { + "override_view.sql": override_view_return_no_relation__macros__override_view_sql + }, + } + write_project_files(project_root, "override-view-return-no-relation", files) diff --git a/tests/functional/materializations/fixtures.py b/tests/functional/materializations/fixtures.py new file mode 100644 index 000000000..8828b22bb --- /dev/null +++ b/tests/functional/materializations/fixtures.py @@ -0,0 +1,245 @@ +fct_eph_first_sql = """ +-- fct_eph_first.sql +{{ config(materialized='ephemeral') }} + +with int_eph_first as( + select * from {{ ref('int_eph_first') }} +) + +select * from int_eph_first +""" + +int_eph_first_sql = """ +-- int_eph_first.sql +{{ config(materialized='ephemeral') }} + +select + 1 as first_column, + 2 as second_column +""" + +schema_yml = """ +version: 2 + +models: + - name: int_eph_first + columns: + - name: first_column + data_tests: + - not_null + - name: second_column + data_tests: + - not_null + + - name: fct_eph_first + columns: + - name: first_column + data_tests: + - not_null + - name: second_column + data_tests: + - not_null + +""" + +bar_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar1_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar2_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar3_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar4_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar5_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +baz_sql = """ +{{ config(materialized = 'table') }} +SELECT * FROM {{ ref('bar') }} +""" + +baz1_sql = """ +{{ config(materialized = 'table') }} +SELECT * FROM {{ ref('bar_1') }} +""" + +foo_sql = """ +{{ config(materialized = 'ephemeral') }} + +with source as ( + + select 1 as id + +), renamed as ( + + select id as uid from source + +) + +select * from renamed +""" + +foo1_sql = """ +{{ config(materialized = 'ephemeral') }} + +WITH source AS ( + + SELECT 1 AS id + +), RENAMED as ( + + SELECT id as UID FROM source + +) + +SELECT * FROM renamed +""" + +foo2_sql = """ +{{ config(materialized = 'ephemeral') }} + +WITH source AS ( + + SELECT 1 AS id + +), RENAMED as ( + + SELECT id as UID FROM source + +) + +SELECT * FROM renamed +""" diff --git a/tests/functional/materializations/materialized_view_tests/test_materialized_view.py b/tests/functional/materializations/materialized_view_tests/test_materialized_view.py new file mode 100644 index 000000000..bb634557b --- /dev/null +++ b/tests/functional/materializations/materialized_view_tests/test_materialized_view.py @@ -0,0 +1,117 @@ +from typing import Optional, Tuple + +from dbt.adapters.base.relation import BaseRelation +from dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic +from dbt.tests.adapter.materialized_view.changes import ( + MaterializedViewChanges, + MaterializedViewChangesApplyMixin, + MaterializedViewChangesContinueMixin, + MaterializedViewChangesFailMixin, +) +from dbt.tests.adapter.materialized_view.files import MY_TABLE, MY_VIEW +from dbt.tests.util import get_model_file, set_model_file +import pytest + +from utils import query_indexes, query_relation_type + + +MY_MATERIALIZED_VIEW = """ +{{ config( + materialized='materialized_view', + indexes=[{'columns': ['id']}], +) }} +select * from {{ ref('my_seed') }} +""" + + +class TestPostgresMaterializedViewsBasic(MaterializedViewBasic): + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": MY_TABLE, + "my_view.sql": MY_VIEW, + "my_materialized_view.sql": MY_MATERIALIZED_VIEW, + } + + @staticmethod + def insert_record(project, table: BaseRelation, record: Tuple[int, int]): + my_id, value = record + project.run_sql(f"insert into {table} (id, value) values ({my_id}, {value})") + + @staticmethod + def refresh_materialized_view(project, materialized_view: BaseRelation): + sql = f"refresh materialized view {materialized_view}" + project.run_sql(sql) + + @staticmethod + def query_row_count(project, relation: BaseRelation) -> int: + sql = f"select count(*) from {relation}" + return project.run_sql(sql, fetch="one")[0] + + @staticmethod + def query_relation_type(project, relation: BaseRelation) -> Optional[str]: + return query_relation_type(project, relation) + + +class PostgresMaterializedViewChanges(MaterializedViewChanges): + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": MY_TABLE, + "my_view.sql": MY_VIEW, + "my_materialized_view.sql": MY_MATERIALIZED_VIEW, + } + + @staticmethod + def query_relation_type(project, relation: BaseRelation) -> Optional[str]: + return query_relation_type(project, relation) + + @staticmethod + def check_start_state(project, materialized_view): + indexes = query_indexes(project, materialized_view) + assert len(indexes) == 1 + assert indexes[0]["column_names"] == "id" + + @staticmethod + def change_config_via_alter(project, materialized_view): + initial_model = get_model_file(project, materialized_view) + new_model = initial_model.replace( + "indexes=[{'columns': ['id']}]", + "indexes=[{'columns': ['value']}]", + ) + set_model_file(project, materialized_view, new_model) + + @staticmethod + def check_state_alter_change_is_applied(project, materialized_view): + indexes = query_indexes(project, materialized_view) + assert len(indexes) == 1 + assert indexes[0]["column_names"] == "value" + + @staticmethod + def change_config_via_replace(project, materialized_view): + # dbt-postgres does not currently monitor changes of this type + pass + + +class TestPostgresMaterializedViewChangesApply( + PostgresMaterializedViewChanges, MaterializedViewChangesApplyMixin +): + @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") + def test_change_is_applied_via_replace(self, project, my_materialized_view): + super().test_change_is_applied_via_replace(project, my_materialized_view) + + +class TestPostgresMaterializedViewChangesContinue( + PostgresMaterializedViewChanges, MaterializedViewChangesContinueMixin +): + @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") + def test_change_is_not_applied_via_replace(self, project, my_materialized_view): + super().test_change_is_not_applied_via_alter(project, my_materialized_view) + + +class TestPostgresMaterializedViewChangesFail( + PostgresMaterializedViewChanges, MaterializedViewChangesFailMixin +): + @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") + def test_change_is_not_applied_via_replace(self, project, my_materialized_view): + super().test_change_is_not_applied_via_replace(project, my_materialized_view) diff --git a/tests/functional/materializations/materialized_view_tests/utils.py b/tests/functional/materializations/materialized_view_tests/utils.py new file mode 100644 index 000000000..572f21aeb --- /dev/null +++ b/tests/functional/materializations/materialized_view_tests/utils.py @@ -0,0 +1,73 @@ +from typing import Dict, List, Optional + +from dbt.adapters.base.relation import BaseRelation + +from dbt.adapters.postgres.relation import PostgresRelation + + +def query_relation_type(project, relation: BaseRelation) -> Optional[str]: + assert isinstance(relation, PostgresRelation) + sql = f""" + select + 'table' as relation_type + from pg_tables + where schemaname = '{relation.schema}' + and tablename = '{relation.identifier}' + union all + select + 'view' as relation_type + from pg_views + where schemaname = '{relation.schema}' + and viewname = '{relation.identifier}' + union all + select + 'materialized_view' as relation_type + from pg_matviews + where schemaname = '{relation.schema}' + and matviewname = '{relation.identifier}' + """ + results = project.run_sql(sql, fetch="all") + if len(results) == 0: + return None + elif len(results) > 1: + raise ValueError(f"More than one instance of {relation.name} found!") + else: + return results[0][0] + + +def query_indexes(project, relation: BaseRelation) -> List[Dict[str, str]]: + assert isinstance(relation, PostgresRelation) + # pulled directly from `postgres__describe_indexes_template` and manually verified + sql = f""" + select + i.relname as name, + m.amname as method, + ix.indisunique as "unique", + array_to_string(array_agg(a.attname), ',') as column_names + from pg_index ix + join pg_class i + on i.oid = ix.indexrelid + join pg_am m + on m.oid=i.relam + join pg_class t + on t.oid = ix.indrelid + join pg_namespace n + on n.oid = t.relnamespace + join pg_attribute a + on a.attrelid = t.oid + and a.attnum = ANY(ix.indkey) + where t.relname ilike '{ relation.identifier }' + and n.nspname ilike '{ relation.schema }' + and t.relkind in ('r', 'm') + group by 1, 2, 3 + order by 1, 2, 3 + """ + raw_indexes = project.run_sql(sql, fetch="all") + indexes = [ + { + header: value + for header, value in zip(["name", "method", "unique", "column_names"], index) + } + for index in raw_indexes + ] + return indexes diff --git a/tests/functional/materializations/test_custom_materialization.py b/tests/functional/materializations/test_custom_materialization.py new file mode 100644 index 000000000..6aa69a4b5 --- /dev/null +++ b/tests/functional/materializations/test_custom_materialization.py @@ -0,0 +1,80 @@ +from dbt.tests.util import run_dbt +import pytest + + +models__model_sql = """ +{{ config(materialized='view') }} +select 1 as id + +""" + + +@pytest.fixture(scope="class") +def models(): + return {"model.sql": models__model_sql} + + +class TestOverrideAdapterDependency: + # make sure that if there's a dependency with an adapter-specific + # materialization, we honor that materialization + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-dep"}]} + + def test_adapter_dependency(self, project, override_view_adapter_dep): + run_dbt(["deps"]) + # this should error because the override is buggy + run_dbt(["run"], expect_pass=False) + + +class TestOverrideDefaultDependency: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + def test_default_dependency(self, project, override_view_default_dep): + run_dbt(["deps"]) + # this should error because the override is buggy + run_dbt(["run"], expect_pass=False) + + +class TestOverrideAdapterDependencyPassing: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-pass-dep"}]} + + def test_default_dependency(self, project, override_view_adapter_pass_dep): + run_dbt(["deps"]) + # this should pass because the override is ok + run_dbt(["run"]) + + +class TestOverrideAdapterLocal: + # make sure that the local default wins over the dependency + # adapter-specific + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-pass-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"macro-paths": ["override-view-adapter-macros"]} + + def test_default_dependency( + self, project, override_view_adapter_pass_dep, override_view_adapter_macros + ): + run_dbt(["deps"]) + # this should error because the override is buggy + run_dbt(["run"], expect_pass=False) + + +class TestOverrideDefaultReturn: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"macro-paths": ["override-view-return-no-relation"]} + + def test_default_dependency(self, project, override_view_return_no_relation): + run_dbt(["deps"]) + results = run_dbt(["run"], expect_pass=False) + assert "did not explicitly return a list of relations" in results[0].message diff --git a/tests/functional/materializations/test_incremental.py b/tests/functional/materializations/test_incremental.py new file mode 100644 index 000000000..c3f05d11e --- /dev/null +++ b/tests/functional/materializations/test_incremental.py @@ -0,0 +1,48 @@ +from dbt.context.providers import generate_runtime_model_context +from dbt.tests.util import get_manifest, run_dbt +from dbt_common.exceptions import DbtRuntimeError +import pytest + + +@pytest.fixture(scope="class") +def models(): + return {"my_model.sql": "select 1 as fun"} + + +def test_basic(project): + results = run_dbt(["run"]) + assert len(results) == 1 + + manifest = get_manifest(project.project_root) + model = manifest.nodes["model.test.my_model"] + + # Normally the context will be provided by the macro that calls the + # get_incrmental_strategy_macro method, but for testing purposes + # we create a runtime_model_context. + context = generate_runtime_model_context( + model, + project.adapter.config, + manifest, + ) + + macro_func = project.adapter.get_incremental_strategy_macro(context, "default") + assert macro_func + assert type(macro_func).__name__ == "MacroGenerator" + + macro_func = project.adapter.get_incremental_strategy_macro(context, "append") + assert macro_func + assert type(macro_func).__name__ == "MacroGenerator" + + macro_func = project.adapter.get_incremental_strategy_macro(context, "delete+insert") + assert macro_func + assert type(macro_func).__name__ == "MacroGenerator" + + # This incremental strategy only works for Postgres >= 15 + macro_func = project.adapter.get_incremental_strategy_macro(context, "merge") + assert macro_func + assert type(macro_func).__name__ == "MacroGenerator" + + # This incremental strategy is not valid for Postgres + with pytest.raises(DbtRuntimeError) as excinfo: + macro_func = project.adapter.get_incremental_strategy_macro(context, "insert_overwrite") + assert "insert_overwrite" in str(excinfo.value) diff --git a/tests/functional/materializations/test_runtime_materialization.py b/tests/functional/materializations/test_runtime_materialization.py new file mode 100644 index 000000000..9ad654c9c --- /dev/null +++ b/tests/functional/materializations/test_runtime_materialization.py @@ -0,0 +1,204 @@ +from dbt.tests.util import ( + check_relations_equal, + check_table_does_not_exist, + run_dbt, +) +import pytest + + +models__view_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select * from {{ this.schema }}.seed + +{% if is_incremental() %} + {% do exceptions.raise_compiler_error("is_incremental() evaluated to True in a view") %} +{% endif %} + +""" + +models__incremental_sql = """ +{{ + config( + materialized = "incremental" + ) +}} + +select * from {{ this.schema }}.seed + +{% if is_incremental() %} + + where id > (select max(id) from {{this}}) + +{% endif %} + +""" + +models__materialized_sql = """ +{{ + config( + materialized = "table" + ) +}} + +select * from {{ this.schema }}.seed + +{% if is_incremental() %} + {% do exceptions.raise_compiler_error("is_incremental() evaluated to True in a table") %} +{% endif %} + +""" + +seeds__seed_csv = """id,first_name,last_name,email,gender,ip_address +1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 +2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 +3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 +4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 +5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 +6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 +7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 +8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 +9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 +10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 +""" + +invalidate_incremental_sql = """ +insert into {schema}.incremental (first_name, last_name, email, gender, ip_address) values + ('Hank', 'Hund', 'hank@yahoo.com', 'Male', '101.239.70.175'); +""" + +update_sql = """ +-- create a view on top of the models +create view {schema}.dependent_view as ( + + select count(*) from {schema}.materialized + union all + select count(*) from {schema}.view + union all + select count(*) from {schema}.incremental + +); + +insert into {schema}.seed (id, first_name, last_name, email, gender, ip_address) values (101, 'Michael', 'Perez', 'mperez0@chronoengine.com', 'Male', '106.239.70.175'); +insert into {schema}.seed (id, first_name, last_name, email, gender, ip_address) values (102, 'Shawn', 'Mccoy', 'smccoy1@reddit.com', 'Male', '24.165.76.182'); +insert into {schema}.seed (id, first_name, last_name, email, gender, ip_address) values (103, 'Kathleen', 'Payne', 'kpayne2@cargocollective.com', 'Female', '113.207.168.106'); +insert into {schema}.seed (id, first_name, last_name, email, gender, ip_address) values (104, 'Jimmy', 'Cooper', 'jcooper3@cargocollective.com', 'Male', '198.24.63.114'); +insert into {schema}.seed (id, first_name, last_name, email, gender, ip_address) values (105, 'Katherine', 'Rice', 'krice4@typepad.com', 'Female', '36.97.186.238'); +""" + +create_view__dbt_tmp_sql = """ +create view {schema}.view__dbt_tmp as ( + select 1 as id +); +""" + +create_view__dbt_backup_sql = """ +create view {schema}.view__dbt_backup as ( + select 1 as id +); +""" + +create_incremental__dbt_tmp_sql = """ +create table {schema}.incremental__dbt_tmp as ( + select 1 as id +); +""" + + +@pytest.fixture(scope="class") +def models(): + return { + "view.sql": models__view_sql, + "incremental.sql": models__incremental_sql, + "materialized.sql": models__materialized_sql, + } + + +@pytest.fixture(scope="class") +def seeds(): + return {"seed.csv": seeds__seed_csv} + + +@pytest.fixture(scope="class", autouse=True) +def setup(project): + run_dbt(["seed"]) + + +class TestRuntimeMaterialization: + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + } + } + + def test_full_refresh( + self, + project, + ): + # initial full-refresh should have no effect + results = run_dbt(["run", "-f"]) + assert len(results) == 3 + + check_relations_equal(project.adapter, ["seed", "view", "incremental", "materialized"]) + + # adds one record to the incremental model. full-refresh should truncate then re-run + project.run_sql(invalidate_incremental_sql) + results = run_dbt(["run", "-f"]) + assert len(results) == 3 + check_relations_equal(project.adapter, ["seed", "incremental"]) + + project.run_sql(update_sql) + + results = run_dbt(["run", "-f"]) + assert len(results) == 3 + + check_relations_equal(project.adapter, ["seed", "view", "incremental", "materialized"]) + + def test_delete_dbt_tmp_relation( + self, + project, + ): + # This creates a __dbt_tmp view - make sure it doesn't interfere with the dbt run + project.run_sql(create_view__dbt_tmp_sql) + results = run_dbt(["run", "--model", "view"]) + assert len(results) == 1 + + check_table_does_not_exist(project.adapter, "view__dbt_tmp") + check_relations_equal(project.adapter, ["seed", "view"]) + + # Again, but with a __dbt_backup view + project.run_sql(create_view__dbt_backup_sql) + results = run_dbt(["run", "--model", "view"]) + assert len(results) == 1 + + check_table_does_not_exist(project.adapter, "view__dbt_backup") + check_relations_equal(project.adapter, ["seed", "view"]) + + # Again, but against the incremental materialization + results = run_dbt(["run", "--model", "incremental"]) + project.run_sql(create_incremental__dbt_tmp_sql) + assert len(results) == 1 + + results = run_dbt(["run", "--model", "incremental", "-f"]) + assert len(results) == 1 + + check_table_does_not_exist(project.adapter, "incremental__dbt_tmp") + check_relations_equal(project.adapter, ["seed", "incremental"]) + + +# Run same tests with models configured with full_refresh +class TestRuntimeMaterializationWithConfig(TestRuntimeMaterialization): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + "models": {"full_refresh": True}, + } diff --git a/tests/functional/materializations/test_supported_languages.py b/tests/functional/materializations/test_supported_languages.py new file mode 100644 index 000000000..251b31ed5 --- /dev/null +++ b/tests/functional/materializations/test_supported_languages.py @@ -0,0 +1,100 @@ +from dbt.tests.util import run_dbt +import pytest + + +custom_mat_tmpl = """ +{% materialization custom_mat{} %} + {%- set target_relation = this.incorporate(type='table') %} + {% call statement('main') -%} + select 1 as column1 + {%- endcall %} + {{ return({'relations': [target_relation]}) }} +{% endmaterialization %} +""" + +models__sql_model = """ +{{ config(materialized='custom_mat') }} +select 1 as fun +""" + +models__py_model = """ +def model(dbt, session): + dbt.config(materialized='custom_mat') + return +""" + + +class SupportedLanguageBase: + model_map = { + "sql": ("sql_model.sql", models__sql_model), + "python": ("py_model.py", models__py_model), + } + + @pytest.fixture(scope="class") + def macros(self): + custom_mat = custom_mat_tmpl.replace("{}", "") + + if hasattr(self, "supported_langs"): + custom_mat = custom_mat_tmpl.replace( + "{}", f", supported_languages=[{self.lang_list()}]" + ) + return {"custom_mat.sql": custom_mat} + + @pytest.fixture(scope="class") + def models(self): + file_name, model = self.model_map[self.use_lang] + return {file_name: model} + + def lang_list(self): + return ", ".join([f"'{l}'" for l in self.supported_langs]) + + def test_language(self, project): + result = run_dbt(["run"], expect_pass=self.expect_pass) + if not self.expect_pass: + assert "only supports languages" in result.results[0].message + + +class TestSupportedLanguages_SupportsDefault_UsingSql(SupportedLanguageBase): + use_lang = "sql" + expect_pass = True + + +class TestSupportedLanguages_SupportsDefault_UsingPython(SupportedLanguageBase): + use_lang = "python" + expect_pass = False + + +class TestSupportedLanguages_SupportsSql_UsingSql(SupportedLanguageBase): + supported_langs = ["sql"] + use_lang = "sql" + expect_pass = True + + +class TestSupportedLanguages_SuppotsSql_UsingPython(SupportedLanguageBase): + supported_langs = ["sql"] + use_lang = "python" + expect_pass = False + + +class TestSupportedLanguages_SuppotsPython_UsingSql(SupportedLanguageBase): + supported_langs = ["python"] + use_lang = "sql" + expect_pass = False + + +class TestSupportedLanguages_SuppotsPython_UsingPython(SupportedLanguageBase): + supported_langs = ["python"] + use_lang = "python" + expect_pass = True + + +class TestSupportedLanguages_SuppotsSqlAndPython_UsingSql(SupportedLanguageBase): + supported_langs = ["sql", "python"] + use_lang = "sql" + expect_pass = True + + +class TestSupportedLanguages_SuppotsSqlAndPython_UsingPython(SupportedLanguageBase): + supported_langs = ["sql", "python"] + use_lang = "python" + expect_pass = True diff --git a/tests/functional/metrics/fixtures.py b/tests/functional/metrics/fixtures.py new file mode 100644 index 000000000..5a8373fbe --- /dev/null +++ b/tests/functional/metrics/fixtures.py @@ -0,0 +1,666 @@ +# not strictly necessary, but this reflects the integration tests currently in the 'dbt-metrics' package right now +# i'm including just the first 10 rows for more concise 'git diff' + +mock_purchase_data_csv = """purchased_at,payment_type,payment_total +2021-02-14 17:52:36,maestro,2418.94 +2021-02-15 04:16:50,jcb,3043.28 +2021-02-15 11:30:45,solo,1505.81 +2021-02-16 13:08:18,,1532.85 +2021-02-17 05:41:34,americanexpress,319.91 +2021-02-18 06:47:32,jcb,2143.44 +2021-02-19 01:37:09,jcb,840.1 +2021-02-19 03:38:49,jcb,1388.18 +2021-02-19 04:22:41,jcb,2834.96 +2021-02-19 13:28:50,china-unionpay,2440.98 +""".strip() + +models_people_sql = """ +select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at +union all +select 2 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at +union all +select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at +""" + +semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + model: ref('people') + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + +basic_metrics_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: "years_tenure" + filter: "{{ Dimension('id__loves_dbt') }} is true" + + - name: average_tenure + label: "Average tenure" + description: "The average tenure per person" + type: ratio + type_params: + numerator: collective_tenure + denominator: number_of_people + + - name: average_tenure_plus_one + label: "Average tenure, plus 1" + description: "The average tenure per person" + type: derived + type_params: + metrics: + - average_tenure + expr: "average_tenure + 1" +""" + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023, 'mm/dd/yyyy') as date_day +""" + +models_people_metrics_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + join_to_timespine: true + fill_nulls_with: 0 + + - name: collective_window + label: "Collective window" + description: Testing window + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + window: 14 days + + - name: average_tenure + label: Average Tenure + description: The average tenure of our people + type: ratio + type_params: + numerator: collective_tenure + denominator: number_of_people + + - name: average_tenure_minus_people + label: Average Tenure minus People + description: Well this isn't really useful is it? + type: derived + type_params: + expr: average_tenure - number_of_people + metrics: + - average_tenure + - number_of_people + +""" + +invalid_models_people_metrics_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + model: "ref(people)" + calculation_method: count + expression: "*" + timestamp: created_at + time_grains: [day, week, month] + dimensions: + - favorite_color + - loves_dbt + meta: + my_meta: 'testing' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + model: "ref(people)" + calculation_method: sum + expression: tenure + timestamp: created_at + time_grains: [day] + filters: + - field: loves_dbt + operator: 'is' + value: 'true' + +""" + +invalid_metrics_missing_model_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + calculation_method: count + expression: "*" + timestamp: created_at + time_grains: [day, week, month] + dimensions: + - favorite_color + - loves_dbt + meta: + my_meta: 'testing' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + calculation_method: sum + expression: tenure + timestamp: created_at + time_grains: [day] + filters: + - field: loves_dbt + operator: 'is' + value: 'true' + +""" + +invalid_metrics_missing_expression_yml = """ +version: 2 +metrics: + - name: number_of_people + label: "Number of people" + model: "ref(people)" + description: Total count of people + calculation_method: count + timestamp: created_at + time_grains: [day, week, month] + dimensions: + - favorite_color + - loves_dbt + meta: + my_meta: 'testing' +""" + +names_with_spaces_metrics_yml = """ +version: 2 + +metrics: + + - name: number of people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + +""" + +names_with_special_chars_metrics_yml = """ +version: 2 + +metrics: + + - name: number_of_people! + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + +""" + + +names_with_leading_numeric_metrics_yml = """ +version: 2 + +metrics: + + - name: 1_number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + +""" + +long_name_metrics_yml = """ +version: 2 + +metrics: + + - name: this_name_is_going_to_contain_more_than_250_characters_but_be_otherwise_acceptable_and_then_will_throw_an_error_which_I_expect_to_happen_and_repeat_this_name_is_going_to_contain_more_than_250_characters_but_be_otherwise_acceptable_and_then_will_throw_an_error_which_I_expect_to_happen + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + +""" + +downstream_model_sql = """ +-- this model will depend on these three metrics +{% set some_metrics = [ + metric('count_orders'), + metric('sum_order_revenue'), + metric('average_order_value') +] %} + +/* +{% if not execute %} + + -- the only properties available to us at 'parse' time are: + -- 'metric_name' + -- 'package_name' (None if same package) + + {% set metric_names = [] %} + {% for m in some_metrics %} + {% do metric_names.append(m.metric_name) %} + {% endfor %} + + -- this config does nothing, but it lets us check these values below + {{ config(metric_names = metric_names) }} + +{% else %} + + -- these are the properties available to us at 'execution' time + + {% for m in some_metrics %} + name: {{ m.name }} + label: {{ m.label }} + type: {{ m.type }} + type_params: {{ m.type_params }} + filter: {{ m.filter }} + {% endfor %} + +{% endif %} + +select 1 as id +""" + +invalid_derived_metric_contains_model_yml = """ +version: 2 +metrics: + - name: count_orders + label: Count orders + model: ref('mock_purchase_data') + + calculation_method: count + expression: "*" + timestamp: purchased_at + time_grains: [day, week, month, quarter, year] + + dimensions: + - payment_type + + - name: sum_order_revenue + label: Total order revenue + model: ref('mock_purchase_data') + + calculation_method: sum + expression: "payment_total" + timestamp: purchased_at + time_grains: [day, week, month, quarter, year] + + dimensions: + - payment_type + + - name: average_order_value + label: Average Order Value + + calculation_method: derived + expression: "{{metric('sum_order_revenue')}} / {{metric('count_orders')}} " + model: ref('mock_purchase_data') + timestamp: purchased_at + time_grains: [day, week, month, quarter, year] + + dimensions: + - payment_type +""" + +purchasing_model_sql = """ +select purchased_at, payment_type, payment_total from {{ ref('mock_purchase_data') }} +""" + +semantic_model_purchasing_yml = """ +version: 2 + +semantic_models: + - name: semantic_purchasing + model: ref('purchasing') + measures: + - name: num_orders + agg: COUNT + expr: purchased_at + - name: order_revenue + agg: SUM + expr: payment_total + dimensions: + - name: purchased_at + type: TIME + entities: + - name: purchase + type: primary + expr: '1' + defaults: + agg_time_dimension: purchased_at + +""" + +derived_metric_yml = """ +version: 2 +metrics: + - name: count_orders + label: Count orders + type: simple + type_params: + measure: num_orders + + - name: sum_order_revenue + label: Total order revenue + type: simple + type_params: + measure: order_revenue + + - name: average_order_value + label: Average Order Value + type: ratio + type_params: + numerator: + name: sum_order_revenue + denominator: + name: count_orders +""" + +disabled_metric_level_schema_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + config: + enabled: False + meta: + my_meta: 'testing' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + +""" + +enabled_metric_level_schema_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + config: + enabled: True + meta: + my_meta: 'testing' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + +""" + +models_people_metrics_sql = """ +-- this model will depend on these two metrics +{% set some_metrics = [ + metric('number_of_people'), + metric('collective_tenure') +] %} + +/* +{% if not execute %} + + -- the only properties available to us at 'parse' time are: + -- 'metric_name' + -- 'package_name' (None if same package) + + {% set metric_names = [] %} + {% for m in some_metrics %} + {% do metric_names.append(m.metric_name) %} + {% endfor %} + + -- this config does nothing, but it lets us check these values below + {{ config(metric_names = metric_names) }} + +{% else %} + + -- these are the properties available to us at 'execution' time + + {% for m in some_metrics %} + name: {{ m.name }} + label: {{ m.label }} + type: {{ m.type }} + type_params: {{ m.type_params }} + filter: {{ m.filter }} + window: {{ m.window }} + {% endfor %} + +{% endif %} + +select 1 as id +""" + +metrics_1_yml = """ +version: 2 + +metrics: + - name: some_metric + label: Some Metric + type: simple + type_params: + measure: some_measure +""" + +metrics_2_yml = """ +version: 2 + +metrics: + - name: some_metric + label: Some Metric + type: simple + type_params: + measure: some_measure +""" + +model_a_sql = """ +select 1 as fun +""" + +model_b_sql = """ +-- {{ metric('some_metric') }} + +{% if execute %} + {% set model_ref_node = graph.nodes.values() | selectattr('name', 'equalto', 'model_a') | first %} + {% set relation = api.Relation.create( + database = model_ref_node.database, + schema = model_ref_node.schema, + identifier = model_ref_node.alias + ) + %} +{% else %} + {% set relation = "" %} +{% endif %} + +-- this one is a real ref +select * from {{ ref('model_a') }} +union all +-- this one is synthesized via 'graph' var +select * from {{ relation }} +""" + +invalid_config_metric_yml = """ +version: 2 + +metrics: + - name: number_of_people + label: "Number of people" + config: + enabled: True and False + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' +""" + +invalid_metric_without_timestamp_with_time_grains_yml = """ +version: 2 + +metrics: + - name: number_of_people + label: "Number of people" + description: Total count of people + model: "ref('people')" + time_grains: [day, week, month] + calculation_method: count + expression: "*" + dimensions: + - favorite_color + - loves_dbt + meta: + my_meta: 'testing' +""" + +invalid_metric_without_timestamp_with_window_yml = """ +version: 2 + +metrics: + - name: number_of_people + label: "Number of people" + description: Total count of people + model: "ref('people')" + window: + count: 14 + period: day + calculation_method: count + expression: "*" + dimensions: + - favorite_color + - loves_dbt + meta: + my_meta: 'testing' +""" + +conversion_semantic_model_purchasing_yml = """ +version: 2 + +semantic_models: + - name: semantic_purchasing + model: ref('purchasing') + measures: + - name: num_orders + agg: COUNT + expr: purchased_at + - name: num_visits + agg: SUM + expr: 1 + dimensions: + - name: purchased_at + type: TIME + entities: + - name: purchase + type: primary + expr: '1' + defaults: + agg_time_dimension: purchased_at + +""" + +conversion_metric_yml = """ +version: 2 +metrics: + - name: converted_orders_over_visits + label: Number of orders converted from visits + type: conversion + type_params: + conversion_type_params: + base_measure: num_visits + conversion_measure: num_orders + entity: purchase +""" diff --git a/tests/functional/metrics/test_metric_configs.py b/tests/functional/metrics/test_metric_configs.py new file mode 100644 index 000000000..f4b75015d --- /dev/null +++ b/tests/functional/metrics/test_metric_configs.py @@ -0,0 +1,206 @@ +from dbt.contracts.graph.model_config import MetricConfig +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import CompilationError +import pytest + +from tests.functional.metrics.fixtures import ( + disabled_metric_level_schema_yml, + enabled_metric_level_schema_yml, + invalid_config_metric_yml, + metricflow_time_spine_sql, + models_people_metrics_sql, + models_people_metrics_yml, + models_people_sql, + semantic_model_people_yml, +) + + +class MetricConfigTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self): + pytest.expected_config = MetricConfig( + enabled=True, + ) + + +# Test enabled config in dbt_project.yml +class TestMetricEnabledConfigProjectLevel(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": models_people_metrics_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "metrics": { + "average_tenure_minus_people": { + "enabled": True, + }, + } + } + + def test_enabled_metric_config_dbt_project(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.average_tenure_minus_people" in manifest.metrics + + new_enabled_config = { + "metrics": { + "test": { + "average_tenure_minus_people": { + "enabled": False, + }, + } + } + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.average_tenure_minus_people" not in manifest.metrics + assert "metric.test.collective_tenure" in manifest.metrics + + +# Test enabled config at metrics level in yml file +class TestConfigYamlMetricLevel(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": disabled_metric_level_schema_yml, + } + + def test_metric_config_yaml_metric_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.number_of_people" not in manifest.metrics + assert "metric.test.collective_tenure" in manifest.metrics + + +# Test inheritence - set configs at project and metric level - expect metric level to win +class TestMetricConfigsInheritence(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": enabled_metric_level_schema_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"metrics": {"enabled": False}} + + def test_metrics_all_configs(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + # This should be overridden + assert "metric.test.number_of_people" in manifest.metrics + # This should stay disabled + assert "metric.test.collective_tenure" not in manifest.metrics + + config_test_table = manifest.metrics.get("metric.test.number_of_people").config + + assert isinstance(config_test_table, MetricConfig) + assert config_test_table == pytest.expected_config + + +# Test CompilationError if a model references a disabled metric +class TestDisabledMetricRef(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "people_metrics.sql": models_people_metrics_sql, + "schema.yml": models_people_metrics_yml, + } + + def test_disabled_metric_ref_model(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.number_of_people" in manifest.metrics + assert "metric.test.collective_tenure" in manifest.metrics + assert "model.test.people_metrics" in manifest.nodes + assert "metric.test.average_tenure" in manifest.metrics + assert "metric.test.average_tenure_minus_people" in manifest.metrics + + new_enabled_config = { + "metrics": { + "test": { + "number_of_people": { + "enabled": False, + }, + "average_tenure_minus_people": { + "enabled": False, + }, + "average_tenure": { + "enabled": False, + }, + } + } + } + + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + with pytest.raises(CompilationError): + run_dbt(["parse"]) + + +# Test invalid metric configs +class TestInvalidMetric(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": invalid_config_metric_yml, + } + + def test_invalid_config_metric(self, project): + with pytest.raises(ValidationError) as excinfo: + run_dbt(["parse"]) + expected_msg = "'True and False' is not of type 'boolean'" + assert expected_msg in str(excinfo.value) + + +class TestDisabledMetric(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": models_people_metrics_yml, + } + + def test_disabling_upstream_metric_errors(self, project): + run_dbt(["parse"]) # shouldn't error out yet + + new_enabled_config = { + "metrics": { + "test": { + "number_of_people": { + "enabled": False, + }, + } + } + } + + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + with pytest.raises(ParsingError) as excinfo: + run_dbt(["parse"]) + expected_msg = ( + "The metric `number_of_people` is disabled and thus cannot be referenced." + ) + assert expected_msg in str(excinfo.value) diff --git a/tests/functional/metrics/test_metric_deferral.py b/tests/functional/metrics/test_metric_deferral.py new file mode 100644 index 000000000..11affcd00 --- /dev/null +++ b/tests/functional/metrics/test_metric_deferral.py @@ -0,0 +1,83 @@ +import os +from pathlib import Path + +from dbt.tests.util import copy_file, run_dbt, write_file +import pytest + +from tests.functional.metrics.fixtures import ( + metrics_1_yml, + metrics_2_yml, + model_a_sql, + model_b_sql, +) + + +class TestMetricDeferral: + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + # Create "prod" schema + prod_schema_name = project.test_schema + "_prod" + project.create_test_schema(schema_name=prod_schema_name) + # Create "state" directory + path = Path(project.project_root) / "state" + Path.mkdir(path) + + @pytest.fixture(scope="class") + def dbt_profile_data(self, unique_schema): + return { + "test": { + "outputs": { + "default": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": os.getenv("POSTGRES_TEST_USER", "root"), + "pass": os.getenv("POSTGRES_TEST_PASS", "password"), + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema, + }, + "prod": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": os.getenv("POSTGRES_TEST_USER", "root"), + "pass": os.getenv("POSTGRES_TEST_PASS", "password"), + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema + "_prod", + }, + }, + "target": "default", + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": model_a_sql, + "model_b.sql": model_b_sql, + "metrics.yml": metrics_1_yml, + } + + @pytest.mark.skip("TODO") + def test_metric_deferral(self, project): + results = run_dbt(["run", "--target", "prod"]) + assert len(results) == 2 + + # copy manifest.json to "state" directory + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # Change metrics file + write_file(metrics_2_yml, project.project_root, "models", "metrics.yml") + + # Confirm that some_metric + model_b are both selected, and model_a is not selected + results = run_dbt(["ls", "-s", "state:modified+", "--state", "state/", "--target", "prod"]) + assert results == ["metric:test.some_metric", "test.model_b"] + + # Run in default schema + results = run_dbt( + ["run", "-s", "state:modified+", "--state", "state/", "--defer", "--target", "default"] + ) + assert len(results) == 1 diff --git a/tests/functional/metrics/test_metric_helper_functions.py b/tests/functional/metrics/test_metric_helper_functions.py new file mode 100644 index 000000000..87a8dad97 --- /dev/null +++ b/tests/functional/metrics/test_metric_helper_functions.py @@ -0,0 +1,54 @@ +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.metrics import ResolvedMetricReference +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.metrics.fixtures import ( + basic_metrics_yml, + metricflow_time_spine_sql, + models_people_sql, + semantic_model_people_yml, +) + + +class TestMetricHelperFunctions: + @pytest.fixture(scope="class") + def models(self): + return { + "metrics.yml": basic_metrics_yml, + "semantic_people.yml": semantic_model_people_yml, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "people.sql": models_people_sql, + } + + def test_derived_metric( + self, + project, + ): + + # initial parse + manifest = run_dbt(["parse"]) + assert isinstance(manifest, Manifest) + + parsed_metric = manifest.metrics["metric.test.average_tenure_plus_one"] + testing_metric = ResolvedMetricReference(parsed_metric, manifest) + + full_metric_dependency = set(testing_metric.full_metric_dependency()) + expected_full_metric_dependency = set( + ["average_tenure_plus_one", "average_tenure", "collective_tenure", "number_of_people"] + ) + assert full_metric_dependency == expected_full_metric_dependency + + base_metric_dependency = set(testing_metric.base_metric_dependency()) + expected_base_metric_dependency = set(["collective_tenure", "number_of_people"]) + assert base_metric_dependency == expected_base_metric_dependency + + derived_metric_dependency = set(testing_metric.derived_metric_dependency()) + expected_derived_metric_dependency = set(["average_tenure_plus_one", "average_tenure"]) + assert derived_metric_dependency == expected_derived_metric_dependency + + derived_metric_dependency_depth = list(testing_metric.derived_metric_dependency_depth()) + expected_derived_metric_dependency_depth = list( + [{"average_tenure_plus_one": 1}, {"average_tenure": 2}] + ) + assert derived_metric_dependency_depth == expected_derived_metric_dependency_depth diff --git a/tests/functional/metrics/test_metrics.py b/tests/functional/metrics/test_metrics.py new file mode 100644 index 000000000..1275e47a7 --- /dev/null +++ b/tests/functional/metrics/test_metrics.py @@ -0,0 +1,399 @@ +from dbt.cli.main import dbtRunner +from dbt.contracts.graph.manifest import Manifest +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt +import pytest + +from tests.functional.metrics.fixtures import ( + conversion_metric_yml, + conversion_semantic_model_purchasing_yml, + derived_metric_yml, + downstream_model_sql, + invalid_derived_metric_contains_model_yml, + invalid_metric_without_timestamp_with_time_grains_yml, + invalid_metric_without_timestamp_with_window_yml, + invalid_metrics_missing_expression_yml, + invalid_metrics_missing_model_yml, + invalid_models_people_metrics_yml, + long_name_metrics_yml, + metricflow_time_spine_sql, + mock_purchase_data_csv, + models_people_metrics_yml, + models_people_sql, + names_with_leading_numeric_metrics_yml, + names_with_spaces_metrics_yml, + names_with_special_chars_metrics_yml, + purchasing_model_sql, + semantic_model_people_yml, + semantic_model_purchasing_yml, +) + + +class TestSimpleMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": models_people_metrics_yml, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + } + + def test_simple_metric( + self, + project, + ): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + manifest = get_manifest(project.project_root) + metric_ids = list(manifest.metrics.keys()) + expected_metric_ids = [ + "metric.test.number_of_people", + "metric.test.collective_tenure", + "metric.test.collective_window", + "metric.test.average_tenure", + "metric.test.average_tenure_minus_people", + ] + assert metric_ids == expected_metric_ids + + assert ( + len(manifest.metrics["metric.test.number_of_people"].type_params.input_measures) == 1 + ) + assert ( + len(manifest.metrics["metric.test.collective_tenure"].type_params.input_measures) == 1 + ) + assert ( + len(manifest.metrics["metric.test.collective_window"].type_params.input_measures) == 1 + ) + assert len(manifest.metrics["metric.test.average_tenure"].type_params.input_measures) == 2 + assert ( + len( + manifest.metrics[ + "metric.test.average_tenure_minus_people" + ].type_params.input_measures + ) + == 3 + ) + + +class TestInvalidRefMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": invalid_models_people_metrics_yml, + "people.sql": models_people_sql, + } + + # tests that we get a ParsingError with an invalid model ref, where + # the model name does not have quotes + def test_simple_metric( + self, + project, + ): + # initial run + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestInvalidMetricMissingModel: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": invalid_metrics_missing_model_yml, + "people.sql": models_people_sql, + } + + # tests that we get a ParsingError with an invalid model ref, where + # the model name does not have quotes + def test_simple_metric( + self, + project, + ): + # initial run + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestInvalidMetricMissingExpression: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": invalid_metrics_missing_expression_yml, + "people.sql": models_people_sql, + } + + # tests that we get a ParsingError with a missing expression + def test_simple_metric( + self, + project, + ): + # initial run + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestNamesWithSpaces: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": names_with_spaces_metrics_yml, + "people.sql": models_people_sql, + } + + def test_names_with_spaces(self, project): + with pytest.raises(ParsingError) as exc: + run_dbt(["run"]) + assert "cannot contain spaces" in str(exc.value) + + +class TestNamesWithSpecialChar: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": names_with_special_chars_metrics_yml, + "people.sql": models_people_sql, + } + + def test_names_with_special_char(self, project): + with pytest.raises(ParsingError) as exc: + run_dbt(["run"]) + assert "must contain only letters, numbers and underscores" in str(exc.value) + + +class TestNamesWithLeandingNumber: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": names_with_leading_numeric_metrics_yml, + "people.sql": models_people_sql, + } + + def test_names_with_leading_number(self, project): + with pytest.raises(ParsingError) as exc: + run_dbt(["run"]) + assert "must begin with a letter" in str(exc.value) + + +class TestLongName: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": long_name_metrics_yml, + "people.sql": models_people_sql, + } + + def test_long_name(self, project): + with pytest.raises(ParsingError) as exc: + run_dbt(["run"]) + assert "cannot contain more than 250 characters" in str(exc.value) + + +class TestInvalidDerivedMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "derived_metric.yml": invalid_derived_metric_contains_model_yml, + "downstream_model.sql": downstream_model_sql, + } + + def test_invalid_derived_metrics(self, project): + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestMetricDependsOn: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + } + + def test_metric_depends_on(self, project): + manifest = run_dbt(["parse"]) + assert isinstance(manifest, Manifest) + + expected_depends_on_for_number_of_people = ["semantic_model.test.semantic_people"] + expected_depends_on_for_average_tenure = [ + "metric.test.collective_tenure", + "metric.test.number_of_people", + ] + + number_of_people_metric = manifest.metrics["metric.test.number_of_people"] + assert number_of_people_metric.depends_on.nodes == expected_depends_on_for_number_of_people + + average_tenure_metric = manifest.metrics["metric.test.average_tenure"] + assert average_tenure_metric.depends_on.nodes == expected_depends_on_for_average_tenure + + +class TestDerivedMetric: + @pytest.fixture(scope="class") + def models(self): + return { + "downstream_model.sql": downstream_model_sql, + "purchasing.sql": purchasing_model_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_purchasing_yml, + "derived_metric.yml": derived_metric_yml, + } + + # not strictly necessary to use "real" mock data for this test + # we just want to make sure that the 'metric' calls match our expectations + # but this sort of thing is possible, to have actual data flow through and validate results + @pytest.fixture(scope="class") + def seeds(self): + return { + "mock_purchase_data.csv": mock_purchase_data_csv, + } + + def test_derived_metric( + self, + project, + ): + # initial parse + results = run_dbt(["parse"]) + + # make sure all the metrics are in the manifest + manifest = get_manifest(project.project_root) + metric_ids = list(manifest.metrics.keys()) + expected_metric_ids = [ + "metric.test.count_orders", + "metric.test.sum_order_revenue", + "metric.test.average_order_value", + ] + assert metric_ids == expected_metric_ids + + # make sure the downstream_model depends on these metrics + metric_names = ["average_order_value", "count_orders", "sum_order_revenue"] + downstream_model = manifest.nodes["model.test.downstream_model"] + assert sorted(downstream_model.metrics) == [[metric_name] for metric_name in metric_names] + assert sorted(downstream_model.depends_on.nodes) == [ + "metric.test.average_order_value", + "metric.test.count_orders", + "metric.test.sum_order_revenue", + ] + assert sorted(downstream_model.config["metric_names"]) == metric_names + + # make sure the 'expression' metric depends on the two upstream metrics + derived_metric = manifest.metrics["metric.test.average_order_value"] + assert sorted(derived_metric.depends_on.nodes) == [ + "metric.test.count_orders", + "metric.test.sum_order_revenue", + ] + + # actually compile + results = run_dbt(["compile", "--select", "downstream_model"]) + compiled_code = results[0].node.compiled_code + + # make sure all these metrics properties show up in compiled SQL + for metric_name in manifest.metrics: + parsed_metric_node = manifest.metrics[metric_name] + for property in [ + "name", + "label", + "type", + "type_params", + "filter", + ]: + expected_value = getattr(parsed_metric_node, property) + assert f"{property}: {expected_value}" in compiled_code + + +class TestInvalidTimestampTimeGrainsMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": invalid_metric_without_timestamp_with_time_grains_yml, + "people.sql": models_people_sql, + } + + # Tests that we get a ParsingError with an invalid metric definition. + # This metric definition is missing timestamp but HAS a time_grains property + def test_simple_metric( + self, + project, + ): + # initial run + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestInvalidTimestampWindowMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "people_metrics.yml": invalid_metric_without_timestamp_with_window_yml, + "people.sql": models_people_sql, + } + + # Tests that we get a ParsingError with an invalid metric definition. + # This metric definition is missing timestamp but HAS a window property + def test_simple_metric( + self, + project, + ): + # initial run + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestConversionMetric: + @pytest.fixture(scope="class") + def models(self): + return { + "purchasing.sql": purchasing_model_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": conversion_semantic_model_purchasing_yml, + "conversion_metric.yml": conversion_metric_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "mock_purchase_data.csv": mock_purchase_data_csv, + } + + def test_conversion_metric( + self, + project, + ): + # initial parse + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + + # make sure the metric is in the manifest + manifest = get_manifest(project.project_root) + metric_ids = list(manifest.metrics.keys()) + expected_metric_ids = [ + "metric.test.converted_orders_over_visits", + ] + assert metric_ids == expected_metric_ids + assert manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.conversion_type_params + assert ( + len( + manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.input_measures + ) + == 2 + ) + assert ( + manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.conversion_type_params.window + is None + ) + assert ( + manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.conversion_type_params.entity + == "purchase" + ) diff --git a/tests/functional/minimal_cli/fixtures.py b/tests/functional/minimal_cli/fixtures.py new file mode 100644 index 000000000..dadfb130f --- /dev/null +++ b/tests/functional/minimal_cli/fixtures.py @@ -0,0 +1,111 @@ +import pytest +from click.testing import CliRunner + +models__schema_yml = """ +version: 2 +models: + - name: sample_model + columns: + - name: sample_num + data_tests: + - accepted_values: + values: [1, 2] + - not_null + - name: sample_bool + data_tests: + - not_null + - unique +""" + +models__sample_model = """ +select * from {{ ref('sample_seed') }} +""" + +snapshots__sample_snapshot = """ +{% snapshot orders_snapshot %} + +{{ + config( + target_database='dbt', + target_schema='snapshots', + unique_key='sample_num', + strategy='timestamp', + updated_at='updated_at', + ) +}} + +select * from {{ ref('sample_model') }} + +{% endsnapshot %} +""" + +seeds__sample_seed = """sample_num,sample_bool +1,true +2,false +,true +""" + +tests__failing_sql = """ +{{ config(severity = 'warn') }} +select 1 +""" + + +class BaseConfigProject: + @pytest.fixture() + def runner(self): + return CliRunner() + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "name": "jaffle_shop", + "profile": "jaffle_shop", + "version": "0.1.0", + "config-version": 2, + "clean-targets": ["target", "dbt_packages", "logs"], + } + + @pytest.fixture(scope="class") + def profiles_config_update(self): + return { + "jaffle_shop": { + "outputs": { + "dev": { + "type": "postgres", + "dbname": "dbt", + "schema": "jaffle_shop", + "host": "localhost", + "user": "root", + "port": 5432, + "pass": "password", + } + }, + "target": "dev", + } + } + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"package": "dbt-labs/dbt_utils", "version": "1.0.0"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "sample_model.sql": models__sample_model, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return {"sample_snapshot.sql": snapshots__sample_snapshot} + + @pytest.fixture(scope="class") + def seeds(self): + return {"sample_seed.csv": seeds__sample_seed} + + @pytest.fixture(scope="class") + def tests(self): + return { + "failing.sql": tests__failing_sql, + } diff --git a/tests/functional/minimal_cli/test_minimal_cli.py b/tests/functional/minimal_cli/test_minimal_cli.py new file mode 100644 index 000000000..64bcb1f1e --- /dev/null +++ b/tests/functional/minimal_cli/test_minimal_cli.py @@ -0,0 +1,62 @@ +from dbt.cli.main import cli + +from tests.functional.minimal_cli.fixtures import BaseConfigProject +from tests.functional.utils import up_one + + +class TestClean(BaseConfigProject): + """Test the minimal/happy-path for the CLI using the Click CliRunner""" + + def test_clean(self, runner, project): + result = runner.invoke(cli, ["clean"]) + assert "target" in result.output + assert "dbt_packages" in result.output + assert "logs" in result.output + + +class TestCleanUpLevel(BaseConfigProject): + def test_clean_one_level_up(self, runner, project): + with up_one(): + result = runner.invoke(cli, ["clean"]) + assert result.exit_code == 2 + assert "Runtime Error" in result.output + assert "No dbt_project.yml" in result.output + + +class TestDeps(BaseConfigProject): + def test_deps(self, runner, project): + result = runner.invoke(cli, ["deps"]) + assert "dbt-labs/dbt_utils" in result.output + assert "1.0.0" in result.output + + +class TestLS(BaseConfigProject): + def test_ls(self, runner, project): + runner.invoke(cli, ["deps"]) + ls_result = runner.invoke(cli, ["ls"]) + assert "1 seed" in ls_result.output + assert "1 model" in ls_result.output + assert "5 data tests" in ls_result.output + assert "1 snapshot" in ls_result.output + + +class TestBuild(BaseConfigProject): + def test_build(self, runner, project): + runner.invoke(cli, ["deps"]) + result = runner.invoke(cli, ["build"]) + # 1 seed, 1 model, 2 data tests + assert "PASS=4" in result.output + # 2 data tests + assert "ERROR=2" in result.output + # Singular test + assert "WARN=1" in result.output + # 1 snapshot + assert "SKIP=1" in result.output + + +class TestDocsGenerate(BaseConfigProject): + def test_docs_generate(self, runner, project): + runner.invoke(cli, ["deps"]) + result = runner.invoke(cli, ["docs", "generate"]) + assert "Building catalog" in result.output + assert "Catalog written" in result.output diff --git a/tests/functional/partial_parsing/fixtures.py b/tests/functional/partial_parsing/fixtures.py new file mode 100644 index 000000000..f76d90ad2 --- /dev/null +++ b/tests/functional/partial_parsing/fixtures.py @@ -0,0 +1,1228 @@ +local_dependency__dbt_project_yml = """ + +name: 'local_dep' +version: '1.0' +config-version: 2 + +profile: 'default' + +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] + +require-dbt-version: '>=0.1.0' + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + + +seeds: + quote_columns: False + +""" + +local_dependency__models__schema_yml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + data_tests: + - unique + +""" + +local_dependency__models__model_to_import_sql = """ +select * from {{ ref('seed') }} + +""" + +local_dependency__macros__dep_macro_sql = """ +{% macro some_overridden_macro() -%} +100 +{%- endmacro %} + +""" + +local_dependency__seeds__seed_csv = """id +1 +""" + +empty_schema_with_version_yml = """ + +""" + +schema_sources5_yml = """ + +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email + +seeds: + - name: rad_customers + description: "Raw customer data" + columns: + - name: id + data_tests: + - unique + - not_null + - name: first_name + - name: last_name + - name: email + + +""" + +my_macro2_sql = """ +{% macro do_something(foo2, bar2) %} + + select + 'foo' as foo2, + 'var' as bar2 + +{% endmacro %} + +""" + +raw_customers_csv = """id,first_name,last_name,email +1,Michael,Perez,mperez0@chronoengine.com +2,Shawn,Mccoy,smccoy1@reddit.com +3,Kathleen,Payne,kpayne2@cargocollective.com +4,Jimmy,Cooper,jcooper3@cargocollective.com +5,Katherine,Rice,krice4@typepad.com +6,Sarah,Ryan,sryan5@gnu.org +7,Martin,Mcdonald,mmcdonald6@opera.com +8,Frank,Robinson,frobinson7@wunderground.com +9,Jennifer,Franklin,jfranklin8@mail.ru +10,Henry,Welch,hwelch9@list-manage.com +""" + +model_three_disabled2_sql = """ +- Disabled model +{{ config(materialized='table', enabled=False) }} + +with source_data as ( + + select 1 as id + union all + select null as id + +) + +select * +from source_data + +""" + +schema_sources4_yml = """ + +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - every_value_is_blue + - name: first_name + - name: last_name + - name: email + +seeds: + - name: raw_customers + description: "Raw customer data" + columns: + - name: id + data_tests: + - unique + - not_null + - name: first_name + - name: last_name + - name: email + + +""" + +env_var_schema_yml = """ + +models: + - name: model_one + config: + materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" + +""" + +my_test_sql = """ +select + * from {{ ref('customers') }} where first_name = '{{ macro_something() }}' + +""" + +empty_schema_yml = """ + +""" + +schema_models_c_yml = """ + +sources: + - name: seed_source + description: "This is a source override" + overrides: local_dep + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + data_tests: + - unique + - not_null + +""" + +env_var_sources_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + database: "{{ env_var('ENV_VAR_DATABASE') }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ env_var('ENV_VAR_SEVERITY') }}" + - unique + - name: first_name + - name: last_name + - name: email + + + +""" + +generic_test_edited_sql = """ +{% test is_odd(model, column_name) %} + +with validation as ( + + select + {{ column_name }} as odd_field2 + + from {{ model }} + +), + +validation_errors as ( + + select + odd_field2 + + from validation + -- if this is true, then odd_field is actually even! + where (odd_field2 % 2) = 0 + +) + +select * +from validation_errors + +{% endtest %} +""" + +schema_sources1_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email + + + +""" + +schema_sources3_yml = """ + +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email + +exposures: + - name: proxy_for_dashboard + description: "This is for the XXX dashboard" + type: "dashboard" + owner: + name: "Dashboard Tester" + email: "tester@dashboard.com" + depends_on: + - ref("model_one") + - source("seed_sources", "raw_customers") + + +""" + +my_analysis_sql = """ +select * from customers + +""" + +schema_sources2_yml = """ + +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email + +exposures: + - name: proxy_for_dashboard + description: "This is for the XXX dashboard" + type: "dashboard" + owner: + name: "Dashboard Tester" + email: "tester@dashboard.com" + depends_on: + - ref("model_one") + - ref("raw_customers") + - source("seed_sources", "raw_customers") + + +""" + +model_color_sql = """ +select 'blue' as fun + +""" + +my_metric_yml = """ +metrics: + - name: new_customers + label: New Customers + model: customers + description: "The number of paid customers who are using the product" + type: simple + type_params: + measure: + name: customers + filter: "{{ Dimension('id__loves_dbt') }} is true" + +meta: + is_okr: True + tags: + - okrs + + + +""" + +env_var_schema2_yml = """ + +models: + - name: model_one + config: + materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" + data_tests: + - check_color: + column_name: fun + color: "env_var('ENV_VAR_COLOR')" + + +""" + +gsm_override_sql = """ +- custom macro +{% macro generate_schema_name(schema_name, node) %} + + {{ schema_name }}_{{ target.schema }} + +{% endmacro %} + +""" + +model_four1_sql = """ +select * from {{ ref('model_three') }} + +""" + +model_one_sql = """ +select 1 as fun + +""" + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" + +env_var_schema3_yml = """ + +models: + - name: model_one + config: + materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" + data_tests: + - check_color: + column_name: fun + color: "env_var('ENV_VAR_COLOR')" + +exposures: + - name: proxy_for_dashboard + description: "This is for the XXX dashboard" + type: "dashboard" + owner: + name: "{{ env_var('ENV_VAR_OWNER') }}" + email: "tester@dashboard.com" + depends_on: + - ref("model_color") + - source("seed_sources", "raw_customers") + +""" + +people_semantic_models_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + model: ref('people') + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + +env_var_metrics_yml = """ + +metrics: + + - name: number_of_people + description: Total count of people + label: "Number of people" + type: simple + type_params: + measure: people + meta: + my_meta: '{{ env_var("ENV_VAR_METRICS") }}' + + - name: collective_tenure + description: Total number of years of team experience + label: "Collective tenure" + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + +""" + +customers_sql = """ +with source as ( + + select * from {{ source('seed_sources', 'raw_customers') }} + +), + +renamed as ( + + select + id as customer_id, + first_name, + last_name, + email + + from source + +) + +select * from renamed + +""" + +model_four2_sql = """ +select fun from {{ ref('model_one') }} + +""" + +env_var_model_sql = """ +select '{{ env_var('ENV_VAR_TEST') }}' as vartest + +""" + +env_var_model_one_sql = """ +select 'blue' as fun + +""" + +custom_schema_tests2_sql = """ +{% test type_one(model) %} + + select * from ( + + select * from {{ model }} + union all + select * from {{ ref('model_b') }} + + ) as Foo + +{% endtest %} + +{% test type_two(model) %} + + {{ config(severity = "ERROR") }} + + select * from {{ model }} + +{% endtest %} + +""" + +metric_model_a_sql = """ +{% + set metric_list = [ + metric('number_of_people'), + metric('collective_tenure') + ] +%} + +{% if not execute %} + + {% set metric_names = [] %} + {% for m in metric_list %} + {% do metric_names.append(m.metric_name) %} + {% endfor %} + + -- this config does nothing, but it lets us check these values + {{ config(metric_names = metric_names) }} + +{% endif %} + + +select 1 as fun + +""" + +model_b_sql = """ +select 1 as notfun + +""" + +customers2_md = """ +{% docs customer_table %} + +LOTS of customer data + +{% enddocs %} + +""" + +custom_schema_tests1_sql = """ +{% test type_one(model) %} + + select * from ( + + select * from {{ model }} + union all + select * from {{ ref('model_b') }} + + ) as Foo + +{% endtest %} + +{% test type_two(model) %} + + {{ config(severity = "WARN") }} + + select * from {{ model }} + +{% endtest %} + +""" + +people_metrics_yml = """ + +metrics: + + - name: number_of_people + description: Total count of people + label: "Number of people" + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + + - name: collective_tenure + description: Total number of years of team experience + label: "Collective tenure" + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + +""" + +people_sql = """ +select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at +union all +select 1 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at + +""" + +orders_sql = """ +select 1 as id, 101 as user_id, 'pending' as status + +""" + +orders_downstream_sql = """ +select * from {{ ref('orders') }} + +""" + +model_a_sql = """ +select 1 as fun + +""" + +model_three_disabled_sql = """ +{{ config(materialized='table', enabled=False) }} + +with source_data as ( + + select 1 as id + union all + select null as id + +) + +select * +from source_data + +""" + +models_schema2b_yml = """ + +models: + - name: model_one + description: "The first model" + - name: model_three + description: "The third model" + columns: + - name: id + data_tests: + - not_null + +""" + +env_var_macros_yml = """ +macros: + - name: do_something + description: "This is a test macro" + meta: + some_key: "{{ env_var('ENV_VAR_SOME_KEY') }}" + + +""" + +models_schema4_yml = """ + +models: + - name: model_one + description: "The first model" + - name: model_three + description: "The third model" + config: + enabled: false + columns: + - name: id + data_tests: + - unique + +""" + +model_two_sql = """ +select 1 as notfun + +""" + +generic_test_schema_yml = """ + +models: + - name: orders + description: "Some order data" + columns: + - name: id + data_tests: + - unique + - is_odd + +""" + +customers1_md = """ +{% docs customer_table %} + +This table contains customer data + +{% enddocs %} + +""" + +model_three_modified_sql = """ +{{ config(materialized='table') }} + +with source_data as ( + + {#- This is model three #} + + select 1 as id + union all + select null as id + +) + +select * +from source_data + +""" + +macros_yml = """ +macros: + - name: do_something + description: "This is a test macro" + +""" + +test_color_sql = """ +{% test check_color(model, column_name, color) %} + + select * + from {{ model }} + where {{ column_name }} = '{{ color }}' + +{% endtest %} + +""" + +models_schema2_yml = """ + +models: + - name: model_one + description: "The first model" + - name: model_three + description: "The third model" + columns: + - name: id + data_tests: + - unique + +""" + +gsm_override2_sql = """ +- custom macro xxxx +{% macro generate_schema_name(schema_name, node) %} + + {{ schema_name }}_{{ target.schema }} + +{% endmacro %} + +""" + +models_schema3_yml = """ + +models: + - name: model_one + description: "The first model" + - name: model_three + description: "The third model" + data_tests: + - unique +macros: + - name: do_something + description: "This is a test macro" + +""" + +generic_test_sql = """ +{% test is_odd(model, column_name) %} + +with validation as ( + + select + {{ column_name }} as odd_field + + from {{ model }} + +), + +validation_errors as ( + + select + odd_field + + from validation + -- if this is true, then odd_field is actually even! + where (odd_field % 2) = 0 + +) + +select * +from validation_errors + +{% endtest %} +""" + +env_var_model_test_yml = """ +models: + - name: model_color + columns: + - name: fun + data_tests: + - unique: + enabled: "{{ env_var('ENV_VAR_ENABLED', True) }}" + +""" + +model_three_sql = """ +{{ config(materialized='table') }} + +with source_data as ( + + select 1 as id + union all + select null as id + +) + +select * +from source_data + +""" + +ref_override2_sql = """ +- Macro to override ref xxxx +{% macro ref(modelname) %} +{% do return(builtins.ref(modelname)) %} +{% endmacro %} + +""" + +models_schema1_yml = """ + +models: + - name: model_one + description: "The first model" + +""" + +macros_schema_yml = """ + + +models: + - name: model_a + data_tests: + - type_one + - type_two + +""" + +models_versions_schema_yml = """ + +models: + - name: model_one + description: "The first model" + versions: + - v: 1 + - v: 2 +""" + +models_versions_defined_in_schema_yml = """ + +models: + - name: model_one + description: "The first model" + versions: + - v: 1 + - v: 2 + defined_in: model_one_different +""" + +models_versions_updated_schema_yml = """ + +models: + - name: model_one + latest_version: 1 + description: "The first model" + versions: + - v: 1 + - v: 2 + defined_in: model_one_different +""" + +my_macro_sql = """ +{% macro do_something(foo2, bar2) %} + + select + '{{ foo2 }}' as foo2, + '{{ bar2 }}' as bar2 + +{% endmacro %} + +""" + +snapshot_sql = """ +{% snapshot orders_snapshot %} + +{{ + config( + target_schema=schema, + strategy='check', + unique_key='id', + check_cols=['status'], + ) +}} + +select * from {{ ref('orders') }} + +{% endsnapshot %} + +{% snapshot orders2_snapshot %} + +{{ + config( + target_schema=schema, + strategy='check', + unique_key='id', + check_cols=['order_date'], + ) +}} + +select * from {{ ref('orders') }} + +{% endsnapshot %} + +""" + +models_schema4b_yml = """ + +models: + - name: model_one + description: "The first model" + - name: model_three + description: "The third model" + config: + enabled: true + columns: + - name: id + data_tests: + - unique + +""" + +test_macro_sql = """ +{% macro macro_something() %} + + {% do return('macro_something') %} + +{% endmacro %} + +""" + +people_metrics2_yml = """ + +metrics: + + - name: number_of_people + description: Total count of people + label: "Number of people" + type: simple + type_params: + measure: people + meta: + my_meta: 'replaced' + + - name: collective_tenure + description: Total number of years of team experience + label: "Collective tenure" + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + +""" + +generic_schema_yml = """ + +models: + - name: orders + description: "Some order data" + columns: + - name: id + data_tests: + - unique + +""" + + +groups_schema_yml_one_group = """ + +groups: + - name: test_group + owner: + name: test_group_owner + +models: + - name: orders + description: "Some order data" +""" + + +groups_schema_yml_two_groups = """ + +groups: + - name: test_group + owner: + name: test_group_owner + - name: test_group2 + owner: + name: test_group_owner2 + +models: + - name: orders + description: "Some order data" +""" + + +groups_schema_yml_two_groups_private_orders_valid_access = """ + +groups: + - name: test_group + owner: + name: test_group_owner + - name: test_group2 + owner: + name: test_group_owner2 + +models: + - name: orders + group: test_group + access: private + description: "Some order data" + - name: orders_downstream + group: test_group + description: "Some order data" +""" + +groups_schema_yml_two_groups_private_orders_invalid_access = """ + +groups: + - name: test_group + owner: + name: test_group_owner + - name: test_group2 + owner: + name: test_group_owner2 + +models: + - name: orders + group: test_group2 + access: private + description: "Some order data" + - name: orders_downstream + group: test_group + description: "Some order data" +""" + +groups_schema_yml_one_group_model_in_group2 = """ + +groups: + - name: test_group + owner: + name: test_group_owner + +models: + - name: orders + description: "Some order data" + config: + group: test_group2 +""" + +groups_schema_yml_two_groups_edited = """ + +groups: + - name: test_group + owner: + name: test_group_owner + - name: test_group2_edited + owner: + name: test_group_owner2 + +models: + - name: orders + description: "Some order data" +""" + + +snapshot2_sql = """ +- add a comment +{% snapshot orders_snapshot %} + +{{ + config( + target_schema=schema, + strategy='check', + unique_key='id', + check_cols=['status'], + ) +}} + +select * from {{ ref('orders') }} + +{% endsnapshot %} + +{% snapshot orders2_snapshot %} + +{{ + config( + target_schema=schema, + strategy='check', + unique_key='id', + check_cols=['order_date'], + ) +}} + +select * from {{ ref('orders') }} + +{% endsnapshot %} + +""" + +sources_tests2_sql = """ + +{% test every_value_is_blue(model, column_name) %} + + select * + from {{ model }} + where {{ column_name }} != 99 + +{% endtest %} + + +""" + +people_metrics3_yml = """ + +metrics: + + - name: number_of_people + description: Total count of people + label: "Number of people" + type: simple + type_params: + measure: people + meta: + my_meta: 'replaced' + +""" + +ref_override_sql = """ +- Macro to override ref +{% macro ref(modelname) %} +{% do return(builtins.ref(modelname)) %} +{% endmacro %} + +""" + +test_macro2_sql = """ +{% macro macro_something() %} + + {% do return('some_name') %} + +{% endmacro %} + +""" + +env_var_macro_sql = """ +{% macro do_something(foo2, bar2) %} + + select + '{{ foo2 }}' as foo2, + '{{ bar2 }}' as bar2 + +{% endmacro %} + +""" + +sources_tests1_sql = """ + +{% test every_value_is_blue(model, column_name) %} + + select * + from {{ model }} + where {{ column_name }} = 9999 + +{% endtest %} + + +""" diff --git a/tests/functional/partial_parsing/test_file_diff.py b/tests/functional/partial_parsing/test_file_diff.py new file mode 100644 index 000000000..3680a94e0 --- /dev/null +++ b/tests/functional/partial_parsing/test_file_diff.py @@ -0,0 +1,64 @@ +import os + +from dbt.tests.util import run_dbt, write_artifact, write_file +import pytest + +from tests.functional.partial_parsing.fixtures import model_one_sql, model_two_sql + + +first_file_diff = { + "deleted": [], + "changed": [], + "added": [{"path": "models/model_one.sql", "content": "select 1 as fun"}], +} + + +second_file_diff = { + "deleted": [], + "changed": [], + "added": [{"path": "models/model_two.sql", "content": "select 123 as notfun"}], +} + + +class TestFileDiffPaths: + def test_file_diffs(self, project): + + os.environ["DBT_PP_FILE_DIFF_TEST"] = "true" + + run_dbt(["deps"]) + run_dbt(["seed"]) + + # We start with an empty project + results = run_dbt() + + write_artifact(first_file_diff, "file_diff.json") + results = run_dbt() + assert len(results) == 1 + + write_artifact(second_file_diff, "file_diff.json") + results = run_dbt() + assert len(results) == 2 + + +class TestFileDiffs: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + def test_no_file_diffs(self, project): + # We start with a project with one model + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 1 + + # add a model file + write_file(model_two_sql, project.project_root, "models", "model_two.sql") + + # parse without computing a file diff + manifest = run_dbt(["--partial-parse", "--no-partial-parse-file-diff", "parse"]) + assert len(manifest.nodes) == 1 + + # default behaviour - parse with computing a file diff + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 2 diff --git a/tests/functional/partial_parsing/test_partial_parsing.py b/tests/functional/partial_parsing/test_partial_parsing.py new file mode 100644 index 000000000..15b89a4c0 --- /dev/null +++ b/tests/functional/partial_parsing/test_partial_parsing.py @@ -0,0 +1,824 @@ +import os +import re +from unittest import mock + +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import ( + get_manifest, + rename_dir, + rm_file, + run_dbt, + run_dbt_and_capture, + write_file, +) +from dbt.contracts.files import ParseFileType +from dbt.contracts.results import TestStatus +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes +from dbt_common.exceptions import CompilationError +import pytest + +from tests.functional.partial_parsing.fixtures import ( + custom_schema_tests1_sql, + custom_schema_tests2_sql, + customers_sql, + customers1_md, + customers2_md, + empty_schema_with_version_yml, + empty_schema_yml, + generic_schema_yml, + generic_test_edited_sql, + generic_test_schema_yml, + generic_test_sql, + gsm_override_sql, + gsm_override2_sql, + local_dependency__dbt_project_yml, + local_dependency__macros__dep_macro_sql, + local_dependency__models__model_to_import_sql, + local_dependency__models__schema_yml, + local_dependency__seeds__seed_csv, + macros_schema_yml, + macros_yml, + model_a_sql, + model_b_sql, + model_four1_sql, + model_four2_sql, + model_one_sql, + model_three_disabled_sql, + model_three_disabled2_sql, + model_three_modified_sql, + model_three_sql, + model_two_sql, + models_schema1_yml, + models_schema2_yml, + models_schema2b_yml, + models_schema3_yml, + models_schema4_yml, + models_schema4b_yml, + my_analysis_sql, + my_macro_sql, + my_macro2_sql, + my_test_sql, + orders_sql, + raw_customers_csv, + ref_override_sql, + ref_override2_sql, + schema_models_c_yml, + schema_sources1_yml, + schema_sources2_yml, + schema_sources3_yml, + schema_sources4_yml, + schema_sources5_yml, + snapshot_sql, + snapshot2_sql, + sources_tests1_sql, + sources_tests2_sql, + test_macro_sql, + test_macro2_sql, +) +from tests.functional.utils import up_one + + +os.environ["DBT_PP_TEST"] = "true" + + +def normalize(path): + return os.path.normcase(os.path.normpath(path)) + + +class TestModels: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + def test_pp_models(self, project): + # initial run + # run_dbt(['clean']) + results = run_dbt(["run"]) + assert len(results) == 1 + + # add a model file + write_file(model_two_sql, project.project_root, "models", "model_two.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # add a schema file + write_file(models_schema1_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert "model.test.model_one" in manifest.nodes + model_one_node = manifest.nodes["model.test.model_one"] + assert model_one_node.description == "The first model" + assert model_one_node.patch_path == "test://" + normalize("models/schema.yml") + + # add a model and a schema file (with a test) at the same time + write_file(models_schema2_yml, project.project_root, "models", "schema.yml") + write_file(model_three_sql, project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "test"], expect_pass=False) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + project_files = [f for f in manifest.files if f.startswith("test://")] + assert len(project_files) == 4 + model_3_file_id = "test://" + normalize("models/model_three.sql") + assert model_3_file_id in manifest.files + model_three_file = manifest.files[model_3_file_id] + assert model_three_file.parse_file_type == ParseFileType.Model + assert type(model_three_file).__name__ == "SourceFile" + model_three_node = manifest.nodes[model_three_file.nodes[0]] + schema_file_id = "test://" + normalize("models/schema.yml") + assert model_three_node.patch_path == schema_file_id + assert model_three_node.description == "The third model" + schema_file = manifest.files[schema_file_id] + assert type(schema_file).__name__ == "SchemaSourceFile" + assert len(schema_file.data_tests) == 1 + tests = schema_file.get_all_test_ids() + assert tests == ["test.test.unique_model_three_id.6776ac8160"] + unique_test_id = tests[0] + assert unique_test_id in manifest.nodes + + # modify model sql file, ensure description still there + write_file(model_three_modified_sql, project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + model_id = "model.test.model_three" + assert model_id in manifest.nodes + model_three_node = manifest.nodes[model_id] + assert model_three_node.description == "The third model" + + # Change the model 3 test from unique to not_null + write_file(models_schema2b_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "test"], expect_pass=False) + manifest = get_manifest(project.project_root) + schema_file_id = "test://" + normalize("models/schema.yml") + schema_file = manifest.files[schema_file_id] + tests = schema_file.get_all_test_ids() + assert tests == ["test.test.not_null_model_three_id.3162ce0a6f"] + not_null_test_id = tests[0] + assert not_null_test_id in manifest.nodes.keys() + assert unique_test_id not in manifest.nodes.keys() + assert len(results) == 1 + + # go back to previous version of schema file, removing patch, test, and model for model three + write_file(models_schema1_yml, project.project_root, "models", "schema.yml") + rm_file(project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # remove schema file, still have 3 models + write_file(model_three_sql, project.project_root, "models", "model_three.sql") + rm_file(project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + manifest = get_manifest(project.project_root) + schema_file_id = "test://" + normalize("models/schema.yml") + assert schema_file_id not in manifest.files + project_files = [f for f in manifest.files if f.startswith("test://")] + assert len(project_files) == 3 + + # Put schema file back and remove a model + # referred to in schema file + write_file(models_schema2_yml, project.project_root, "models", "schema.yml") + rm_file(project.project_root, "models", "model_three.sql") + with pytest.raises(CompilationError): + results = run_dbt(["--partial-parse", "--warn-error", "run"]) + + # Put model back again + write_file(model_three_sql, project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Add model four refing model three + write_file(model_four1_sql, project.project_root, "models", "model_four.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 4 + + # Remove model_three and change model_four to ref model_one + # and change schema file to remove model_three + rm_file(project.project_root, "models", "model_three.sql") + write_file(model_four2_sql, project.project_root, "models", "model_four.sql") + write_file(models_schema1_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Remove model four, put back model three, put back schema file + write_file(model_three_sql, project.project_root, "models", "model_three.sql") + write_file(models_schema2_yml, project.project_root, "models", "schema.yml") + rm_file(project.project_root, "models", "model_four.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # disable model three in the schema file + write_file(models_schema4_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # update enabled config to be true for model three in the schema file + write_file(models_schema4b_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # disable model three in the schema file again + write_file(models_schema4_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # remove disabled config for model three in the schema file to check it gets enabled + write_file(models_schema4b_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Add a macro + write_file(my_macro_sql, project.project_root, "macros", "my_macro.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + manifest = get_manifest(project.project_root) + macro_id = "macro.test.do_something" + assert macro_id in manifest.macros + + # Modify the macro + write_file(my_macro2_sql, project.project_root, "macros", "my_macro.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Add a macro patch + write_file(models_schema3_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Remove the macro + rm_file(project.project_root, "macros", "my_macro.sql") + with pytest.raises(CompilationError): + results = run_dbt(["--partial-parse", "--warn-error", "run"]) + + # put back macro file, got back to schema file with no macro + # add separate macro patch schema file + write_file(models_schema2_yml, project.project_root, "models", "schema.yml") + write_file(my_macro_sql, project.project_root, "macros", "my_macro.sql") + write_file(macros_yml, project.project_root, "macros", "macros.yml") + results = run_dbt(["--partial-parse", "run"]) + + # delete macro and schema file + rm_file(project.project_root, "macros", "my_macro.sql") + rm_file(project.project_root, "macros", "macros.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Add an empty schema file + write_file(empty_schema_yml, project.project_root, "models", "eschema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Add version to empty schema file + write_file(empty_schema_with_version_yml, project.project_root, "models", "eschema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # Disable model_three + write_file(model_three_disabled_sql, project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model_id = "model.test.model_three" + assert model_id in manifest.disabled + assert model_id not in manifest.nodes + + # Edit disabled model three + write_file(model_three_disabled2_sql, project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model_id = "model.test.model_three" + assert model_id in manifest.disabled + assert model_id not in manifest.nodes + + # Remove disabled from model three + write_file(model_three_sql, project.project_root, "models", "model_three.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + manifest = get_manifest(project.project_root) + model_id = "model.test.model_three" + assert model_id in manifest.nodes + assert model_id not in manifest.disabled + + +class TestSources: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + def test_pp_sources(self, project): + # initial run + write_file(raw_customers_csv, project.project_root, "seeds", "raw_customers.csv") + write_file(sources_tests1_sql, project.project_root, "macros", "tests.sql") + results = run_dbt(["run"]) + assert len(results) == 1 + + # Partial parse running 'seed' + run_dbt(["--partial-parse", "seed"]) + manifest = get_manifest(project.project_root) + seed_file_id = "test://" + normalize("seeds/raw_customers.csv") + assert seed_file_id in manifest.files + + # Add another seed file + write_file(raw_customers_csv, project.project_root, "seeds", "more_customers.csv") + run_dbt(["--partial-parse", "run"]) + seed_file_id = "test://" + normalize("seeds/more_customers.csv") + manifest = get_manifest(project.project_root) + assert seed_file_id in manifest.files + seed_id = "seed.test.more_customers" + assert seed_id in manifest.nodes + + # Remove seed file and add a schema files with a source referring to raw_customers + rm_file(project.project_root, "seeds", "more_customers.csv") + write_file(schema_sources1_yml, project.project_root, "models", "sources.yml") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + assert len(manifest.sources) == 1 + file_id = "test://" + normalize("models/sources.yml") + assert file_id in manifest.files + + # add a model referring to raw_customers source + write_file(customers_sql, project.project_root, "models", "customers.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # remove sources schema file + rm_file(project.project_root, "models", "sources.yml") + with pytest.raises(CompilationError): + results = run_dbt(["--partial-parse", "run"]) + + # put back sources and add an exposures file + write_file(schema_sources2_yml, project.project_root, "models", "sources.yml") + results = run_dbt(["--partial-parse", "run"]) + + # remove seed referenced in exposures file + rm_file(project.project_root, "seeds", "raw_customers.csv") + with pytest.raises(CompilationError): + results = run_dbt(["--partial-parse", "run"]) + + # put back seed and remove depends_on from exposure + write_file(raw_customers_csv, project.project_root, "seeds", "raw_customers.csv") + write_file(schema_sources3_yml, project.project_root, "models", "sources.yml") + results = run_dbt(["--partial-parse", "run"]) + + # Add seed config with test to schema.yml, remove exposure + write_file(schema_sources4_yml, project.project_root, "models", "sources.yml") + results = run_dbt(["--partial-parse", "run"]) + + # Change seed name to wrong name + write_file(schema_sources5_yml, project.project_root, "models", "sources.yml") + with pytest.raises(CompilationError): + results = run_dbt(["--partial-parse", "--warn-error", "run"]) + + # Put back seed name to right name + write_file(schema_sources4_yml, project.project_root, "models", "sources.yml") + results = run_dbt(["--partial-parse", "run"]) + + # Add docs file customers.md + write_file(customers1_md, project.project_root, "models", "customers.md") + results = run_dbt(["--partial-parse", "run"]) + + # Change docs file customers.md + write_file(customers2_md, project.project_root, "models", "customers.md") + results = run_dbt(["--partial-parse", "run"]) + + # Delete docs file + rm_file(project.project_root, "models", "customers.md") + results = run_dbt(["--partial-parse", "run"]) + + # Add a data test + write_file(test_macro_sql, project.project_root, "macros", "test-macro.sql") + write_file(my_test_sql, project.project_root, "tests", "my_test.sql") + results = run_dbt(["--partial-parse", "test"]) + manifest = get_manifest(project.project_root) + assert len(manifest.nodes) == 9 + test_id = "test.test.my_test" + assert test_id in manifest.nodes + + # Change macro that data test depends on + write_file(test_macro2_sql, project.project_root, "macros", "test-macro.sql") + results = run_dbt(["--partial-parse", "test"]) + manifest = get_manifest(project.project_root) + + # Add an analysis + write_file(my_analysis_sql, project.project_root, "analyses", "my_analysis.sql") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + + # Remove data test + rm_file(project.project_root, "tests", "my_test.sql") + results = run_dbt(["--partial-parse", "test"]) + manifest = get_manifest(project.project_root) + assert len(manifest.nodes) == 9 + + # Remove analysis + rm_file(project.project_root, "analyses", "my_analysis.sql") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + assert len(manifest.nodes) == 8 + + # Change source test + write_file(sources_tests2_sql, project.project_root, "macros", "tests.sql") + results = run_dbt(["--partial-parse", "run"]) + + +class TestPartialParsingDependency: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": local_dependency__dbt_project_yml, + "models": { + "schema.yml": local_dependency__models__schema_yml, + "model_to_import.sql": local_dependency__models__model_to_import_sql, + }, + "macros": {"dep_macro.sql": local_dependency__macros__dep_macro_sql}, + "seeds": {"seed.csv": local_dependency__seeds__seed_csv}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_parsing_with_dependency(self, project): + run_dbt(["clean"]) + run_dbt(["deps"]) + run_dbt(["seed"]) + run_dbt(["run"]) + + # Add a source override + write_file(schema_models_c_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert len(manifest.sources) == 1 + source_id = "source.local_dep.seed_source.seed" + assert source_id in manifest.sources + # We have 1 root model, 1 local_dep model, 1 local_dep seed, 1 local_dep source test, 2 root source tests + assert len(manifest.nodes) == 5 + test_id = "test.local_dep.source_unique_seed_source_seed_id.afa94935ed" + assert test_id in manifest.nodes + + # Remove a source override + rm_file(project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + assert len(manifest.sources) == 1 + + +class TestNestedMacros: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": model_a_sql, + "model_b.sql": model_b_sql, + "schema.yml": macros_schema_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "custom_schema_tests.sql": custom_schema_tests1_sql, + } + + def test_nested_macros(self, project): + results = run_dbt() + assert len(results) == 2 + manifest = get_manifest(project.project_root) + macro_child_map = manifest.build_macro_child_map() + macro_unique_id = "macro.test.test_type_two" + assert macro_unique_id in macro_child_map + + results = run_dbt(["test"], expect_pass=False) + results = sorted(results, key=lambda r: r.node.name) + assert len(results) == 2 + # type_one_model_a_ + assert results[0].status == TestStatus.Fail + assert re.search(r"union all", results[0].node.compiled_code) + # type_two_model_a_ + assert results[1].status == TestStatus.Warn + assert results[1].node.config.severity == "WARN" + + write_file( + custom_schema_tests2_sql, project.project_root, "macros", "custom_schema_tests.sql" + ) + results = run_dbt(["--partial-parse", "test"], expect_pass=False) + manifest = get_manifest(project.project_root) + test_node_id = "test.test.type_two_model_a_.842bc6c2a7" + assert test_node_id in manifest.nodes + results = sorted(results, key=lambda r: r.node.name) + assert len(results) == 2 + # type_two_model_a_ + assert results[1].status == TestStatus.Fail + assert results[1].node.config.severity == "ERROR" + + +class TestSkipMacros: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + "eschema.yml": empty_schema_yml, + } + + def test_skip_macros(self, project): + # initial run so we have a msgpack file + # includes empty_schema file for bug #4850 + results = run_dbt() + + # add a new ref override macro + write_file(ref_override_sql, project.project_root, "macros", "ref_override.sql") + results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) + assert "Starting full parse." in log_output + + # modify a ref override macro + write_file(ref_override2_sql, project.project_root, "macros", "ref_override.sql") + results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) + assert "Starting full parse." in log_output + + # remove a ref override macro + rm_file(project.project_root, "macros", "ref_override.sql") + results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) + assert "Starting full parse." in log_output + + # custom generate_schema_name macro + write_file(gsm_override_sql, project.project_root, "macros", "gsm_override.sql") + results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) + assert "Starting full parse." in log_output + + # change generate_schema_name macro + write_file(gsm_override2_sql, project.project_root, "macros", "gsm_override.sql") + results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) + assert "Starting full parse." in log_output + + +class TestSnapshots: + @pytest.fixture(scope="class") + def models(self): + return { + "orders.sql": orders_sql, + } + + def test_pp_snapshots(self, project): + + # initial run + results = run_dbt() + assert len(results) == 1 + + # add snapshot + write_file(snapshot_sql, project.project_root, "snapshots", "snapshot.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + snapshot_id = "snapshot.test.orders_snapshot" + assert snapshot_id in manifest.nodes + snapshot2_id = "snapshot.test.orders2_snapshot" + assert snapshot2_id in manifest.nodes + + # run snapshot + results = run_dbt(["--partial-parse", "snapshot"]) + assert len(results) == 2 + + # modify snapshot + write_file(snapshot2_sql, project.project_root, "snapshots", "snapshot.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 1 + + # delete snapshot + rm_file(project.project_root, "snapshots", "snapshot.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 1 + + +class TestTests: + @pytest.fixture(scope="class") + def models(self): + return { + "orders.sql": orders_sql, + "schema.yml": generic_schema_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + # Make sure "generic" directory is created + return {"generic": {"readme.md": ""}} + + def test_pp_generic_tests(self, project): + + # initial run + results = run_dbt() + assert len(results) == 1 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "test.test.unique_orders_id.1360ecc70e"] + assert expected_nodes == list(manifest.nodes.keys()) + + # add generic test in test-path + write_file(generic_test_sql, project.project_root, "tests", "generic", "generic_test.sql") + write_file(generic_test_schema_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + test_id = "test.test.is_odd_orders_id.82834fdc5b" + assert test_id in manifest.nodes + expected_nodes = [ + "model.test.orders", + "test.test.unique_orders_id.1360ecc70e", + "test.test.is_odd_orders_id.82834fdc5b", + ] + assert expected_nodes == list(manifest.nodes.keys()) + + # edit generic test in test-path + write_file( + generic_test_edited_sql, project.project_root, "tests", "generic", "generic_test.sql" + ) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + test_id = "test.test.is_odd_orders_id.82834fdc5b" + assert test_id in manifest.nodes + expected_nodes = [ + "model.test.orders", + "test.test.unique_orders_id.1360ecc70e", + "test.test.is_odd_orders_id.82834fdc5b", + ] + assert expected_nodes == list(manifest.nodes.keys()) + + +class TestExternalModels: + @pytest.fixture(scope="class") + def external_model_node(self): + return ModelNodeArgs( + name="external_model", + package_name="external", + identifier="test_identifier", + schema="test_schema", + ) + + @pytest.fixture(scope="class") + def external_model_node_versioned(self): + return ModelNodeArgs( + name="external_model_versioned", + package_name="external", + identifier="test_identifier_v1", + schema="test_schema", + version=1, + ) + + @pytest.fixture(scope="class") + def external_model_node_depends_on(self): + return ModelNodeArgs( + name="external_model_depends_on", + package_name="external", + identifier="test_identifier_depends_on", + schema="test_schema", + depends_on_nodes=["model.external.external_model_depends_on_parent"], + ) + + @pytest.fixture(scope="class") + def external_model_node_depends_on_parent(self): + return ModelNodeArgs( + name="external_model_depends_on_parent", + package_name="external", + identifier="test_identifier_depends_on_parent", + schema="test_schema", + ) + + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": model_one_sql} + + @mock.patch("dbt.plugins.get_plugin_manager") + def test_pp_external_models( + self, + get_plugin_manager, + project, + external_model_node, + external_model_node_versioned, + external_model_node_depends_on, + external_model_node_depends_on_parent, + ): + # initial plugin - one external model + external_nodes = PluginNodes() + external_nodes.add_model(external_model_node) + get_plugin_manager.return_value.get_nodes.return_value = external_nodes + + # initial parse + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 2 + assert set(manifest.nodes.keys()) == { + "model.external.external_model", + "model.test.model_one", + } + assert len(manifest.external_node_unique_ids) == 1 + assert manifest.external_node_unique_ids == ["model.external.external_model"] + + # add a model file + write_file(model_two_sql, project.project_root, "models", "model_two.sql") + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 3 + + # add an external model + external_nodes.add_model(external_model_node_versioned) + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 4 + assert len(manifest.external_node_unique_ids) == 2 + + # add a model file that depends on external model + write_file( + "SELECT * FROM {{ref('external', 'external_model')}}", + project.project_root, + "models", + "model_depends_on_external.sql", + ) + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 5 + assert len(manifest.external_node_unique_ids) == 2 + + # remove a model file that depends on external model + rm_file(project.project_root, "models", "model_depends_on_external.sql") + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 4 + + # add an external node with depends on + external_nodes.add_model(external_model_node_depends_on) + external_nodes.add_model(external_model_node_depends_on_parent) + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 6 + assert len(manifest.external_node_unique_ids) == 4 + + # skip files parsing - ensure no issues + run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 6 + assert len(manifest.external_node_unique_ids) == 4 + + +class TestPortablePartialParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + @pytest.fixture(scope="class") + def local_dependency_files(self): + return { + "dbt_project.yml": local_dependency__dbt_project_yml, + "models": { + "schema.yml": local_dependency__models__schema_yml, + "model_to_import.sql": local_dependency__models__model_to_import_sql, + }, + "macros": {"dep_macro.sql": local_dependency__macros__dep_macro_sql}, + "seeds": {"seed.csv": local_dependency__seeds__seed_csv}, + } + + def rename_project_root(self, project, new_project_root): + with up_one(new_project_root): + rename_dir(project.project_root, new_project_root) + project.project_root = new_project_root + # flags.project_dir is set during the project test fixture, and is persisted across run_dbt calls, + # so it needs to be reset between invocations + # flags.set_from_args(Namespace(PROJECT_DIR=new_project_root), None) + + @pytest.fixture(scope="class", autouse=True) + def initial_run_and_rename_project_dir(self, project, local_dependency_files): + initial_project_root = project.project_root + renamed_project_root = os.path.join(project.project_root.dirname, "renamed_project_dir") + + write_project_files(project.project_root, "local_dependency", local_dependency_files) + + # initial run + run_dbt(["deps"]) + assert len(run_dbt(["seed"])) == 1 + assert len(run_dbt(["run"])) == 2 + + self.rename_project_root(project, renamed_project_root) + yield + self.rename_project_root(project, initial_project_root) + + def test_pp_renamed_project_dir_unchanged_project_contents(self, project): + # partial parse same project in new absolute dir location, using partial_parse.msgpack created in previous dir + run_dbt(["deps"]) + assert len(run_dbt(["--partial-parse", "seed"])) == 1 + assert len(run_dbt(["--partial-parse", "run"])) == 2 + + def test_pp_renamed_project_dir_changed_project_contents(self, project): + write_file(model_two_sql, project.project_root, "models", "model_two.sql") + + # partial parse changed project in new absolute dir location, using partial_parse.msgpack created in previous dir + run_dbt(["deps"]) + len(run_dbt(["--partial-parse", "seed"])) == 1 + len(run_dbt(["--partial-parse", "run"])) == 3 diff --git a/tests/functional/partial_parsing/test_pp_disabled_config.py b/tests/functional/partial_parsing/test_pp_disabled_config.py new file mode 100644 index 000000000..8a4ece9d2 --- /dev/null +++ b/tests/functional/partial_parsing/test_pp_disabled_config.py @@ -0,0 +1,224 @@ +from dbt.tests.util import get_manifest, run_dbt, write_file +import pytest + + +model_one_sql = """ +select 1 as fun +""" + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" + +schema1_yml = """ +version: 2 + +models: + - name: model_one + +semantic_models: + - name: semantic_people + model: ref('model_one') + dimensions: + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: people + agg: count + expr: fun + entities: + - name: fun + type: primary + defaults: + agg_time_dimension: created_at + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + +exposures: + - name: proxy_for_dashboard + description: "My Exposure" + type: "dashboard" + owner: + name: "Dashboard Tester" + email: "tester@dashboard.com" + depends_on: + - ref("model_one") +""" + +schema2_yml = """ +version: 2 + +models: + - name: model_one + +semantic_models: + - name: semantic_people + model: ref('model_one') + dimensions: + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: people + agg: count + expr: fun + entities: + - name: fun + type: primary + defaults: + agg_time_dimension: created_at + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + config: + enabled: false + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + +exposures: + - name: proxy_for_dashboard + description: "My Exposure" + config: + enabled: false + type: "dashboard" + owner: + name: "Dashboard Tester" + email: "tester@dashboard.com" + depends_on: + - ref("model_one") +""" + +schema3_yml = """ +version: 2 + +models: + - name: model_one + +semantic_models: + - name: semantic_people + model: ref('model_one') + dimensions: + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: people + agg: count + expr: fun + entities: + - name: fun + type: primary + defaults: + agg_time_dimension: created_at + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' +""" + +schema4_yml = """ +version: 2 + +models: + - name: model_one + +exposures: + - name: proxy_for_dashboard + description: "My Exposure" + config: + enabled: false + type: "dashboard" + owner: + name: "Dashboard Tester" + email: "tester@dashboard.com" + depends_on: + - ref("model_one") +""" + + +class TestDisabled: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "schema.yml": schema1_yml, + } + + def test_pp_disabled(self, project): + expected_exposure = "exposure.test.proxy_for_dashboard" + expected_metric = "metric.test.number_of_people" + + run_dbt(["seed"]) + manifest = run_dbt(["parse"]) + + assert expected_exposure in manifest.exposures + assert expected_metric in manifest.metrics + assert expected_exposure not in manifest.disabled + assert expected_metric not in manifest.disabled + + # Update schema file with disabled metric and exposure + write_file(schema2_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert expected_exposure not in manifest.exposures + assert expected_metric not in manifest.metrics + assert expected_exposure in manifest.disabled + assert expected_metric in manifest.disabled + + # Update schema file with enabled metric and exposure + write_file(schema1_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert expected_exposure in manifest.exposures + assert expected_metric in manifest.metrics + assert expected_exposure not in manifest.disabled + assert expected_metric not in manifest.disabled + + # Update schema file - remove exposure, enable metric + write_file(schema3_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert expected_exposure not in manifest.exposures + assert expected_metric in manifest.metrics + assert expected_exposure not in manifest.disabled + assert expected_metric not in manifest.disabled + + # Update schema file - add back exposure, remove metric + write_file(schema4_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert expected_exposure not in manifest.exposures + assert expected_metric not in manifest.metrics + assert expected_exposure in manifest.disabled + assert expected_metric not in manifest.disabled diff --git a/tests/functional/partial_parsing/test_pp_docs.py b/tests/functional/partial_parsing/test_pp_docs.py new file mode 100644 index 000000000..5df08d4d1 --- /dev/null +++ b/tests/functional/partial_parsing/test_pp_docs.py @@ -0,0 +1,257 @@ +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file +import pytest + + +model_one_sql = """ +select 1 as fun +""" + +raw_customers_csv = """id,first_name,last_name,email +1,Michael,Perez,mperez0@chronoengine.com +2,Shawn,Mccoy,smccoy1@reddit.com +3,Kathleen,Payne,kpayne2@cargocollective.com +4,Jimmy,Cooper,jcooper3@cargocollective.com +5,Katherine,Rice,krice4@typepad.com +6,Sarah,Ryan,sryan5@gnu.org +7,Martin,Mcdonald,mmcdonald6@opera.com +8,Frank,Robinson,frobinson7@wunderground.com +9,Jennifer,Franklin,jfranklin8@mail.ru +10,Henry,Welch,hwelch9@list-manage.com +""" + +my_macro_sql = """ +{% macro my_macro(something) %} + + select + '{{ something }}' as something2 + +{% endmacro %} + +""" + +customers1_md = """ +{% docs customer_table %} + +This table contains customer data + +{% enddocs %} +""" + +customers2_md = """ +{% docs customer_table %} + +LOTS of customer data + +{% enddocs %} + +""" + +schema1_yml = """ +version: 2 + +models: + - name: model_one + description: "{{ doc('customer_table') }}" +""" + +schema2_yml = """ +version: 2 + +models: + - name: model_one + description: "{{ doc('customer_table') }}" + +macros: + - name: my_macro + description: "{{ doc('customer_table') }}" + +sources: + - name: seed_sources + description: "{{ doc('customer_table') }}" + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email + +exposures: + - name: proxy_for_dashboard + description: "{{ doc('customer_table') }}" + type: "dashboard" + owner: + name: "Dashboard Tester" + email: "tester@dashboard.com" + depends_on: + - ref("model_one") + - ref("raw_customers") + - source("seed_sources", "raw_customers") +""" + + +class TestDocs: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "raw_customers.csv": raw_customers_csv, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "my_macro.sql": my_macro_sql, + } + + def test_pp_docs(self, project): + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 1 + + # Add docs file customers.md + write_file(customers1_md, project.project_root, "models", "customers.md") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + assert len(manifest.docs) == 2 + + # Add schema file with 'docs' description + write_file(schema1_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + assert len(manifest.docs) == 2 + doc_id = "doc.test.customer_table" + assert doc_id in manifest.docs + doc = manifest.docs[doc_id] + doc_file_id = doc.file_id + assert doc_file_id in manifest.files + source_file = manifest.files[doc_file_id] + assert len(source_file.nodes) == 1 + model_one_id = "model.test.model_one" + assert model_one_id in source_file.nodes + model_node = manifest.nodes[model_one_id] + assert model_node.description == "This table contains customer data" + + # Update the doc file + write_file(customers2_md, project.project_root, "models", "customers.md") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + assert len(manifest.docs) == 2 + assert model_one_id in manifest.nodes + model_node = manifest.nodes[model_one_id] + assert "LOTS" in model_node.description + + # Add a macro patch, source and exposure with doc + write_file(schema2_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + doc_file = manifest.files[doc_file_id] + expected_nodes = [ + "model.test.model_one", + "source.test.seed_sources.raw_customers", + "macro.test.my_macro", + "exposure.test.proxy_for_dashboard", + ] + assert expected_nodes == doc_file.nodes + source_id = "source.test.seed_sources.raw_customers" + assert manifest.sources[source_id].source_description == "LOTS of customer data" + macro_id = "macro.test.my_macro" + assert manifest.macros[macro_id].description == "LOTS of customer data" + exposure_id = "exposure.test.proxy_for_dashboard" + assert manifest.exposures[exposure_id].description == "LOTS of customer data" + + # update the doc file again + write_file(customers1_md, project.project_root, "models", "customers.md") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + source_file = manifest.files[doc_file_id] + assert model_one_id in source_file.nodes + model_node = manifest.nodes[model_one_id] + assert model_node.description == "This table contains customer data" + assert ( + manifest.sources[source_id].source_description == "This table contains customer data" + ) + assert manifest.macros[macro_id].description == "This table contains customer data" + assert manifest.exposures[exposure_id].description == "This table contains customer data" + + # check that _lock is working + with manifest._lock: + assert manifest._lock + + +my_model_yml = """ +version: 2 +models: + - name: my_model + columns: + - name: id + description: "{{ doc('whatever') }}" +""" + +my_model_no_description_yml = """ +version: 2 +models: + - name: my_model + columns: + - name: id +""" + +my_model_md = """ +{% docs whatever %} + cool stuff +{% enddocs %} +""" + + +class TestDocsRemoveReplace: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": "select 1 as id", + "my_model.yml": my_model_yml, + "my_model.md": my_model_md, + } + + def test_remove_replace(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + doc_id = "doc.test.whatever" + assert doc_id in manifest.docs + doc = manifest.docs[doc_id] + doc_file = manifest.files[doc.file_id] + + model_id = "model.test.my_model" + assert model_id in manifest.nodes + + assert doc_file.nodes == [model_id] + + model = manifest.nodes[model_id] + model_file_id = model.file_id + assert model_file_id in manifest.files + + # remove the doc file + rm_file(project.project_root, "models", "my_model.md") + # remove description from schema file + write_file(my_model_no_description_yml, project.project_root, "models", "my_model.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert doc_id not in manifest.docs + # The bug was that the file still existed in manifest.files + assert doc.file_id not in manifest.files + + # put back the doc file + write_file(my_model_md, project.project_root, "models", "my_model.md") + # put back the description in the schema file + write_file(my_model_yml, project.project_root, "models", "my_model.yml") + run_dbt(["parse"]) diff --git a/tests/functional/partial_parsing/test_pp_groups.py b/tests/functional/partial_parsing/test_pp_groups.py new file mode 100644 index 000000000..48fcaba6e --- /dev/null +++ b/tests/functional/partial_parsing/test_pp_groups.py @@ -0,0 +1,156 @@ +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt, write_file +import pytest + +from tests.functional.partial_parsing.fixtures import ( + groups_schema_yml_one_group, + groups_schema_yml_one_group_model_in_group2, + groups_schema_yml_two_groups, + groups_schema_yml_two_groups_edited, + groups_schema_yml_two_groups_private_orders_invalid_access, + groups_schema_yml_two_groups_private_orders_valid_access, + orders_downstream_sql, + orders_sql, +) + + +class TestGroups: + @pytest.fixture(scope="class") + def models(self): + return { + "orders.sql": orders_sql, + "orders_downstream.sql": orders_downstream_sql, + "schema.yml": groups_schema_yml_one_group, + } + + def test_pp_groups(self, project): + + # initial run + results = run_dbt() + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # add group to schema + write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group", "group.test.test_group2"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # edit group in schema + write_file( + groups_schema_yml_two_groups_edited, project.project_root, "models", "schema.yml" + ) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group", "group.test.test_group2_edited"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # delete group in schema + write_file(groups_schema_yml_one_group, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # add back second group + write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # remove second group with model still configured to second group + write_file( + groups_schema_yml_one_group_model_in_group2, + project.project_root, + "models", + "schema.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + + # add back second group, make orders private with valid ref + write_file( + groups_schema_yml_two_groups_private_orders_valid_access, + project.project_root, + "models", + "schema.yml", + ) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + write_file( + groups_schema_yml_two_groups_private_orders_invalid_access, + project.project_root, + "models", + "schema.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + + +my_model_c = """ +select * from {{ ref("my_model_a") }} union all +select * from {{ ref("my_model_b") }} +""" + +models_yml = """ +models: + - name: my_model_a + - name: my_model_b + - name: my_model_c +""" + +models_and_groups_yml = """ +groups: + - name: sales_analytics + owner: + name: Sales Analytics + email: sales@jaffleshop.com + +models: + - name: my_model_a + access: private + group: sales_analytics + - name: my_model_b + access: private + group: sales_analytics + - name: my_model_c + access: private + group: sales_analytics +""" + + +class TestAddingModelsToNewGroups: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": "select 1 as id", + "my_model_b.sql": "select 2 as id", + "my_model_c.sql": my_model_c, + "models.yml": models_yml, + } + + def test_adding_models_to_new_groups(self, project): + run_dbt(["compile"]) + # This tests that the correct patch is added to my_model_c. The bug + # was that it was using the old patch, so model_c didn't have the + # correct group and access. + write_file(models_and_groups_yml, project.project_root, "models", "models.yml") + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + model_c_node = manifest.nodes["model.test.my_model_c"] + assert model_c_node.group == "sales_analytics" + assert model_c_node.access == "private" diff --git a/tests/functional/partial_parsing/test_pp_metrics.py b/tests/functional/partial_parsing/test_pp_metrics.py new file mode 100644 index 000000000..bc6884519 --- /dev/null +++ b/tests/functional/partial_parsing/test_pp_metrics.py @@ -0,0 +1,85 @@ +from dbt.tests.util import get_manifest, run_dbt, write_file +from dbt_common.exceptions import CompilationError +import pytest + +from tests.functional.partial_parsing.fixtures import ( + metric_model_a_sql, + metricflow_time_spine_sql, + people_metrics_yml, + people_metrics2_yml, + people_metrics3_yml, + people_semantic_models_yml, + people_sql, +) + + +class TestMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + } + + def test_metrics(self, project): + # initial run + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert len(manifest.nodes) == 2 + + # Add metrics yaml file (and necessary semantic models yaml) + write_file( + people_semantic_models_yml, + project.project_root, + "models", + "people_semantic_models.yml", + ) + write_file(people_metrics_yml, project.project_root, "models", "people_metrics.yml") + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + assert len(manifest.metrics) == 2 + metric_people_id = "metric.test.number_of_people" + metric_people = manifest.metrics[metric_people_id] + expected_meta = {"my_meta": "testing"} + assert metric_people.meta == expected_meta + + # TODO: Bring back when we resolving `depends_on_nodes` + # metric_tenure_id = "metric.test.collective_tenure" + # metric_tenure = manifest.metrics[metric_tenure_id] + # assert metric_people.refs == [RefArgs(name="people")] + # assert metric_tenure.refs == [RefArgs(name="people")] + # expected_depends_on_nodes = ["model.test.people"] + # assert metric_people.depends_on.nodes == expected_depends_on_nodes + + # Change metrics yaml files + write_file(people_metrics2_yml, project.project_root, "models", "people_metrics.yml") + results = run_dbt(["run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + metric_people = manifest.metrics[metric_people_id] + expected_meta = {"my_meta": "replaced"} + assert metric_people.meta == expected_meta + # TODO: Bring back when we resolving `depends_on_nodes` + # expected_depends_on_nodes = ["model.test.people"] + # assert metric_people.depends_on.nodes == expected_depends_on_nodes + + # Add model referring to metric + write_file(metric_model_a_sql, project.project_root, "models", "metric_model_a.sql") + results = run_dbt(["run"]) + manifest = get_manifest(project.project_root) + # TODO: Bring back when we resolving `depends_on_nodes` + # model_a = manifest.nodes["model.test.metric_model_a"] + # expected_depends_on_nodes = [ + # "metric.test.number_of_people", + # "metric.test.collective_tenure", + # ] + # assert model_a.depends_on.nodes == expected_depends_on_nodes + + # Then delete a metric + write_file(people_metrics3_yml, project.project_root, "models", "people_metrics.yml") + with pytest.raises(CompilationError): + # We use "parse" here and not "run" because we're checking that the CompilationError + # occurs at parse time, not compilation + results = run_dbt(["parse"]) diff --git a/tests/functional/partial_parsing/test_pp_vars.py b/tests/functional/partial_parsing/test_pp_vars.py new file mode 100644 index 000000000..cb10a7cd2 --- /dev/null +++ b/tests/functional/partial_parsing/test_pp_vars.py @@ -0,0 +1,405 @@ +import os +from pathlib import Path + +from dbt.adapters.exceptions import FailedToConnectError +from dbt.constants import SECRET_ENV_PREFIX +from dbt.exceptions import ParsingError +from dbt.tests.util import ( + get_manifest, + run_dbt, + run_dbt_and_capture, + write_file, +) +import pytest + +from tests.functional.partial_parsing.fixtures import ( + env_var_macro_sql, + env_var_macros_yml, + env_var_metrics_yml, + env_var_model_one_sql, + env_var_model_sql, + env_var_model_test_yml, + env_var_schema_yml, + env_var_schema2_yml, + env_var_schema3_yml, + env_var_sources_yml, + metricflow_time_spine_sql, + model_color_sql, + model_one_sql, + people_semantic_models_yml, + people_sql, + raw_customers_csv, + test_color_sql, +) + + +os.environ["DBT_PP_TEST"] = "true" + + +class TestEnvVars: + @pytest.fixture(scope="class") + def models(self): + return { + "model_color.sql": model_color_sql, + } + + def test_env_vars_models(self, project): + + # initial run + results = run_dbt(["run"]) + assert len(results) == 1 + + # copy a file with an env_var call without an env_var + write_file(env_var_model_sql, project.project_root, "models", "env_var_model.sql") + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + + # set the env var + os.environ["ENV_VAR_TEST"] = "TestingEnvVars" + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_env_vars = {"ENV_VAR_TEST": "TestingEnvVars"} + assert expected_env_vars == manifest.env_vars + model_id = "model.test.env_var_model" + model = manifest.nodes[model_id] + model_created_at = model.created_at + + # change the env var + os.environ["ENV_VAR_TEST"] = "second" + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_env_vars = {"ENV_VAR_TEST": "second"} + assert expected_env_vars == manifest.env_vars + assert model_created_at != manifest.nodes[model_id].created_at + + # set an env_var in a schema file + write_file(env_var_schema_yml, project.project_root, "models", "schema.yml") + write_file(env_var_model_one_sql, project.project_root, "models", "model_one.sql") + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + + # actually set the env_var + os.environ["TEST_SCHEMA_VAR"] = "view" + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + expected_env_vars = {"ENV_VAR_TEST": "second", "TEST_SCHEMA_VAR": "view"} + assert expected_env_vars == manifest.env_vars + + # env vars in a source + os.environ["ENV_VAR_DATABASE"] = "dbt" + os.environ["ENV_VAR_SEVERITY"] = "warn" + write_file(raw_customers_csv, project.project_root, "seeds", "raw_customers.csv") + write_file(env_var_sources_yml, project.project_root, "models", "sources.yml") + run_dbt(["--partial-parse", "seed"]) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + manifest = get_manifest(project.project_root) + expected_env_vars = { + "ENV_VAR_TEST": "second", + "TEST_SCHEMA_VAR": "view", + "ENV_VAR_DATABASE": "dbt", + "ENV_VAR_SEVERITY": "warn", + } + assert expected_env_vars == manifest.env_vars + assert len(manifest.sources) == 1 + source_id = "source.test.seed_sources.raw_customers" + source = manifest.sources[source_id] + assert source.database == "dbt" + schema_file = manifest.files[source.file_id] + test_id = "test.test.source_not_null_seed_sources_raw_customers_id.e39ee7bf0d" + test_node = manifest.nodes[test_id] + assert test_node.config.severity == "WARN" + + # Change severity env var + os.environ["ENV_VAR_SEVERITY"] = "error" + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + expected_env_vars = { + "ENV_VAR_TEST": "second", + "TEST_SCHEMA_VAR": "view", + "ENV_VAR_DATABASE": "dbt", + "ENV_VAR_SEVERITY": "error", + } + assert expected_env_vars == manifest.env_vars + source_id = "source.test.seed_sources.raw_customers" + source = manifest.sources[source_id] + schema_file = manifest.files[source.file_id] + expected_schema_file_env_vars = { + "sources": {"seed_sources": ["ENV_VAR_DATABASE", "ENV_VAR_SEVERITY"]} + } + assert expected_schema_file_env_vars == schema_file.env_vars + test_node = manifest.nodes[test_id] + assert test_node.config.severity == "ERROR" + + # Change database env var + os.environ["ENV_VAR_DATABASE"] = "test_dbt" + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + expected_env_vars = { + "ENV_VAR_TEST": "second", + "TEST_SCHEMA_VAR": "view", + "ENV_VAR_DATABASE": "test_dbt", + "ENV_VAR_SEVERITY": "error", + } + assert expected_env_vars == manifest.env_vars + source = manifest.sources[source_id] + assert source.database == "test_dbt" + + # Delete database env var + del os.environ["ENV_VAR_DATABASE"] + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + os.environ["ENV_VAR_DATABASE"] = "test_dbt" + + # Add generic test with test kwarg that's rendered late (no curly brackets) + os.environ["ENV_VAR_DATABASE"] = "dbt" + write_file(test_color_sql, project.project_root, "macros", "test_color.sql") + results = run_dbt(["--partial-parse", "run"]) + # Add source test using test_color and an env_var for color + write_file(env_var_schema2_yml, project.project_root, "models/schema.yml") + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + os.environ["ENV_VAR_COLOR"] = "green" + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + test_color_id = "test.test.check_color_model_one_env_var_ENV_VAR_COLOR___fun.89638de387" + test_node = manifest.nodes[test_color_id] + # kwarg was rendered but not changed (it will be rendered again when compiled) + assert test_node.test_metadata.kwargs["color"] == "env_var('ENV_VAR_COLOR')" + results = run_dbt(["--partial-parse", "test"]) + + # Add an exposure with an env_var + os.environ["ENV_VAR_OWNER"] = "John Doe" + write_file(env_var_schema3_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + expected_env_vars = { + "ENV_VAR_TEST": "second", + "TEST_SCHEMA_VAR": "view", + "ENV_VAR_DATABASE": "dbt", + "ENV_VAR_SEVERITY": "error", + "ENV_VAR_COLOR": "green", + "ENV_VAR_OWNER": "John Doe", + } + assert expected_env_vars == manifest.env_vars + exposure = list(manifest.exposures.values())[0] + schema_file = manifest.files[exposure.file_id] + expected_sf_env_vars = { + "models": {"model_one": ["TEST_SCHEMA_VAR", "ENV_VAR_COLOR"]}, + "exposures": {"proxy_for_dashboard": ["ENV_VAR_OWNER"]}, + } + assert expected_sf_env_vars == schema_file.env_vars + + # add a macro and a macro schema file + os.environ["ENV_VAR_SOME_KEY"] = "toodles" + write_file(env_var_macro_sql, project.project_root, "macros", "env_var_macro.sql") + write_file(env_var_macros_yml, project.project_root, "macros", "env_var_macros.yml") + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + expected_env_vars = { + "ENV_VAR_TEST": "second", + "TEST_SCHEMA_VAR": "view", + "ENV_VAR_DATABASE": "dbt", + "ENV_VAR_SEVERITY": "error", + "ENV_VAR_COLOR": "green", + "ENV_VAR_OWNER": "John Doe", + "ENV_VAR_SOME_KEY": "toodles", + } + assert expected_env_vars == manifest.env_vars + macro_id = "macro.test.do_something" + macro = manifest.macros[macro_id] + assert macro.meta == {"some_key": "toodles"} + # change the env var + os.environ["ENV_VAR_SOME_KEY"] = "dumdedum" + results = run_dbt(["--partial-parse", "run"]) + manifest = get_manifest(project.project_root) + macro = manifest.macros[macro_id] + assert macro.meta == {"some_key": "dumdedum"} + + # Add a schema file with a test on model_color and env_var in test enabled config + write_file(env_var_model_test_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + manifest = get_manifest(project.project_root) + model_color = manifest.nodes["model.test.model_color"] + schema_file = manifest.files[model_color.patch_path] + expected_env_vars = { + "models": { + "model_one": ["TEST_SCHEMA_VAR", "ENV_VAR_COLOR"], + "model_color": ["ENV_VAR_ENABLED"], + }, + "exposures": {"proxy_for_dashboard": ["ENV_VAR_OWNER"]}, + } + assert expected_env_vars == schema_file.env_vars + + # Add a metrics file with env_vars + os.environ["ENV_VAR_METRICS"] = "TeStInG" + write_file(people_sql, project.project_root, "models", "people.sql") + write_file( + metricflow_time_spine_sql, project.project_root, "models", "metricflow_time_spine.sql" + ) + write_file( + people_semantic_models_yml, project.project_root, "models", "semantic_models.yml" + ) + write_file(env_var_metrics_yml, project.project_root, "models", "metrics.yml") + results = run_dbt(["run"]) + manifest = get_manifest(project.project_root) + assert "ENV_VAR_METRICS" in manifest.env_vars + assert manifest.env_vars["ENV_VAR_METRICS"] == "TeStInG" + metric_node = manifest.metrics["metric.test.number_of_people"] + assert metric_node.meta == {"my_meta": "TeStInG"} + + # Change metrics env var + os.environ["ENV_VAR_METRICS"] = "Changed!" + results = run_dbt(["run"]) + manifest = get_manifest(project.project_root) + metric_node = manifest.metrics["metric.test.number_of_people"] + assert metric_node.meta == {"my_meta": "Changed!"} + + # delete the env vars to cleanup + del os.environ["ENV_VAR_TEST"] + del os.environ["ENV_VAR_SEVERITY"] + del os.environ["ENV_VAR_DATABASE"] + del os.environ["TEST_SCHEMA_VAR"] + del os.environ["ENV_VAR_COLOR"] + del os.environ["ENV_VAR_SOME_KEY"] + del os.environ["ENV_VAR_OWNER"] + del os.environ["ENV_VAR_METRICS"] + + +class TestProjectEnvVars: + @pytest.fixture(scope="class") + def project_config_update(self): + # Need to set the environment variable here initially because + # the project fixture loads the config. + os.environ["ENV_VAR_NAME"] = "Jane Smith" + return {"models": {"+meta": {"meta_name": "{{ env_var('ENV_VAR_NAME') }}"}}} + + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + def test_project_env_vars(self, project): + # Initial run + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + state_check = manifest.state_check + model_id = "model.test.model_one" + model = manifest.nodes[model_id] + assert model.config.meta["meta_name"] == "Jane Smith" + env_vars_hash_checksum = state_check.project_env_vars_hash.checksum + + # Change the environment variable + os.environ["ENV_VAR_NAME"] = "Jane Doe" + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_id] + assert model.config.meta["meta_name"] == "Jane Doe" + assert env_vars_hash_checksum != manifest.state_check.project_env_vars_hash.checksum + + # cleanup + del os.environ["ENV_VAR_NAME"] + + +class TestProfileEnvVars: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + @pytest.fixture(scope="class") + def dbt_profile_target(self): + # Need to set these here because the base integration test class + # calls 'load_config' before the tests are run. + # Note: only the specified profile is rendered, so there's no + # point it setting env_vars in non-used profiles. + os.environ["ENV_VAR_USER"] = "root" + os.environ["ENV_VAR_PASS"] = "password" + return { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": 5432, + "user": "{{ env_var('ENV_VAR_USER') }}", + "pass": "{{ env_var('ENV_VAR_PASS') }}", + "dbname": "dbt", + } + + def test_profile_env_vars(self, project, logs_dir): + + # Initial run + os.environ["ENV_VAR_USER"] = "root" + os.environ["ENV_VAR_PASS"] = "password" + + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum + + # Change env_vars, the user doesn't exist, this should fail + os.environ["ENV_VAR_USER"] = "fake_user" + + # N.B. run_dbt_and_capture won't work here because FailedToConnectError ends the test entirely + with pytest.raises(FailedToConnectError): + run_dbt(["run"], expect_pass=False) + + log_output = Path(logs_dir, "dbt.log").read_text() + assert "env vars used in profiles.yml have changed" in log_output + + manifest = get_manifest(project.project_root) + assert env_vars_checksum != manifest.state_check.profile_env_vars_hash.checksum + + +class TestProfileSecretEnvVars: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + @property + def dbt_profile_target(self): + # Need to set these here because the base integration test class + # calls 'load_config' before the tests are run. + # Note: only the specified profile is rendered, so there's no + # point in setting env_vars in non-used profiles. + + # user is secret and password is not. postgres on macos doesn't care if the password + # changes so we have to change the user. related: https://github.com/dbt-labs/dbt-core/pull/4250 + os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ["ENV_VAR_PASS"] = "password" + return { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": 5432, + "user": "{{ env_var('DBT_ENV_SECRET_USER') }}", + "pass": "{{ env_var('ENV_VAR_PASS') }}", + "dbname": "dbt", + } + + def test_profile_secret_env_vars(self, project): + + # Initial run + os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ["ENV_VAR_PASS"] = "password" + + results = run_dbt(["run"]) + manifest = get_manifest(project.project_root) + env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum + + # Change a secret var, it shouldn't register because we shouldn't save secrets. + os.environ[SECRET_ENV_PREFIX + "USER"] = "fake_user" + # we just want to see if the manifest has included + # the secret in the hash of environment variables. + (results, log_output) = run_dbt_and_capture(["run"], expect_pass=True) + # I020 is the event code for "env vars used in profiles.yml have changed" + assert not ("I020" in log_output) + manifest = get_manifest(project.project_root) + assert env_vars_checksum == manifest.state_check.profile_env_vars_hash.checksum diff --git a/tests/functional/partial_parsing/test_versioned_models.py b/tests/functional/partial_parsing/test_versioned_models.py new file mode 100644 index 000000000..d725c6718 --- /dev/null +++ b/tests/functional/partial_parsing/test_versioned_models.py @@ -0,0 +1,128 @@ +import pathlib + +from dbt.exceptions import DuplicateVersionedUnversionedError +from dbt.tests.util import ( + get_manifest, + read_file, + rm_file, + run_dbt, + write_file, +) +import pytest + + +model_one_sql = """ +select 1 as fun +""" + +model_one_downstream_sql = """ +select fun from {{ ref('model_one') }} +""" + +models_versions_schema_yml = """ + +models: + - name: model_one + description: "The first model" + versions: + - v: 1 + - v: 2 +""" + +models_versions_defined_in_schema_yml = """ +models: + - name: model_one + description: "The first model" + versions: + - v: 1 + - v: 2 + defined_in: model_one_different +""" + +models_versions_updated_schema_yml = """ +models: + - name: model_one + latest_version: 1 + description: "The first model" + versions: + - v: 1 + - v: 2 + defined_in: model_one_different +""" + +model_two_sql = """ +select 1 as notfun +""" + + +class TestVersionedModels: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one_v1.sql": model_one_sql, + "model_one.sql": model_one_sql, + "model_one_downstream.sql": model_one_downstream_sql, + "schema.yml": models_versions_schema_yml, + } + + def test_pp_versioned_models(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + manifest = get_manifest(project.project_root) + model_one_node = manifest.nodes["model.test.model_one.v1"] + assert not model_one_node.is_latest_version + model_two_node = manifest.nodes["model.test.model_one.v2"] + assert model_two_node.is_latest_version + # assert unpinned ref points to latest version + model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"] + assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"] + + # update schema.yml block - model_one is now 'defined_in: model_one_different' + rm_file(project.project_root, "models", "model_one.sql") + write_file(model_one_sql, project.project_root, "models", "model_one_different.sql") + write_file( + models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml" + ) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + + # update versions schema.yml block - latest_version from 2 to 1 + write_file( + models_versions_updated_schema_yml, project.project_root, "models", "schema.yml" + ) + # This is where the test was failings in a CI run with: + # relation \"test..._test_partial_parsing.model_one_downstream\" does not exist + # because in core/dbt/include/global_project/macros/materializations/models/view/view.sql + # "existing_relation" didn't actually exist by the time it gets to the rename of the + # existing relation. + (pathlib.Path(project.project_root) / "log_output").mkdir(parents=True, exist_ok=True) + results = run_dbt( + ["--partial-parse", "--log-format-file", "json", "--log-path", "log_output", "run"] + ) + assert len(results) == 3 + + manifest = get_manifest(project.project_root) + model_one_node = manifest.nodes["model.test.model_one.v1"] + assert model_one_node.is_latest_version + model_two_node = manifest.nodes["model.test.model_one.v2"] + assert not model_two_node.is_latest_version + # assert unpinned ref points to latest version + model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"] + assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"] + + # assert unpinned ref to latest-not-max version yields an "FYI" info-level log + log_output = read_file("log_output", "dbt.log").replace("\n", " ").replace("\\n", " ") + assert "UnpinnedRefNewVersionAvailable" in log_output + + # update versioned model + write_file(model_two_sql, project.project_root, "models", "model_one_different.sql") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 3 + manifest = get_manifest(project.project_root) + assert len(manifest.nodes) == 3 + + # create a new model_one in model_one.sql and re-parse + write_file(model_one_sql, project.project_root, "models", "model_one.sql") + with pytest.raises(DuplicateVersionedUnversionedError): + run_dbt(["parse"]) diff --git a/tests/functional/postgres/fixtures.py b/tests/functional/postgres/fixtures.py new file mode 100644 index 000000000..93b26b4f3 --- /dev/null +++ b/tests/functional/postgres/fixtures.py @@ -0,0 +1,134 @@ +models__incremental_sql = """ +{{ + config( + materialized = "incremental", + indexes=[ + {'columns': ['column_a'], 'type': 'hash'}, + {'columns': ['column_a', 'column_b'], 'unique': True}, + ] + ) +}} + +select * +from ( + select 1 as column_a, 2 as column_b +) t + +{% if is_incremental() %} + where column_a > (select max(column_a) from {{this}}) +{% endif %} + +""" + +models__table_sql = """ +{{ + config( + materialized = "table", + indexes=[ + {'columns': ['column_a']}, + {'columns': ['column_b']}, + {'columns': ['column_a', 'column_b']}, + {'columns': ['column_b', 'column_a'], 'type': 'btree', 'unique': True}, + {'columns': ['column_a'], 'type': 'hash'} + ] + ) +}} + +select 1 as column_a, 2 as column_b + +""" + +models_invalid__invalid_columns_type_sql = """ +{{ + config( + materialized = "table", + indexes=[ + {'columns': 'column_a, column_b'}, + ] + ) +}} + +select 1 as column_a, 2 as column_b + +""" + +models_invalid__invalid_type_sql = """ +{{ + config( + materialized = "table", + indexes=[ + {'columns': ['column_a'], 'type': 'non_existent_type'}, + ] + ) +}} + +select 1 as column_a, 2 as column_b + +""" + +models_invalid__invalid_unique_config_sql = """ +{{ + config( + materialized = "table", + indexes=[ + {'columns': ['column_a'], 'unique': 'yes'}, + ] + ) +}} + +select 1 as column_a, 2 as column_b + +""" + +models_invalid__missing_columns_sql = """ +{{ + config( + materialized = "table", + indexes=[ + {'unique': True}, + ] + ) +}} + +select 1 as column_a, 2 as column_b + +""" + +snapshots__colors_sql = """ +{% snapshot colors %} + + {{ + config( + target_database=database, + target_schema=schema, + unique_key='id', + strategy='check', + check_cols=['color'], + indexes=[ + {'columns': ['id'], 'type': 'hash'}, + {'columns': ['id', 'color'], 'unique': True}, + ] + ) + }} + + {% if var('version') == 1 %} + + select 1 as id, 'red' as color union all + select 2 as id, 'green' as color + + {% else %} + + select 1 as id, 'blue' as color union all + select 2 as id, 'green' as color + + {% endif %} + +{% endsnapshot %} + +""" + +seeds__seed_csv = """country_code,country_name +US,United States +CA,Canada +GB,United Kingdom +""" diff --git a/tests/functional/postgres/test_indexes.py b/tests/functional/postgres/test_indexes.py new file mode 100644 index 000000000..cf706fb83 --- /dev/null +++ b/tests/functional/postgres/test_indexes.py @@ -0,0 +1,148 @@ +import re + +from dbt.tests.util import run_dbt, run_dbt_and_capture +import pytest + +from tests.functional.postgres.fixtures import ( + models__incremental_sql, + models__table_sql, + models_invalid__invalid_columns_type_sql, + models_invalid__invalid_type_sql, + models_invalid__invalid_unique_config_sql, + models_invalid__missing_columns_sql, + seeds__seed_csv, + snapshots__colors_sql, +) + + +INDEX_DEFINITION_PATTERN = re.compile(r"using\s+(\w+)\s+\((.+)\)\Z") + + +class TestPostgresIndex: + @pytest.fixture(scope="class") + def models(self): + return { + "table.sql": models__table_sql, + "incremental.sql": models__incremental_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def snapshots(self): + return {"colors.sql": snapshots__colors_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "seeds": { + "quote_columns": False, + "indexes": [ + {"columns": ["country_code"], "unique": False, "type": "hash"}, + {"columns": ["country_code", "country_name"], "unique": True}, + ], + }, + "vars": { + "version": 1, + }, + } + + def test_table(self, project, unique_schema): + results = run_dbt(["run", "--models", "table"]) + assert len(results) == 1 + + indexes = self.get_indexes("table", project, unique_schema) + expected = [ + {"columns": "column_a", "unique": False, "type": "btree"}, + {"columns": "column_b", "unique": False, "type": "btree"}, + {"columns": "column_a, column_b", "unique": False, "type": "btree"}, + {"columns": "column_b, column_a", "unique": True, "type": "btree"}, + {"columns": "column_a", "unique": False, "type": "hash"}, + ] + assert len(indexes) == len(expected) + + def test_incremental(self, project, unique_schema): + for additional_argument in [[], [], ["--full-refresh"]]: + results = run_dbt(["run", "--models", "incremental"] + additional_argument) + assert len(results) == 1 + + indexes = self.get_indexes("incremental", project, unique_schema) + expected = [ + {"columns": "column_a", "unique": False, "type": "hash"}, + {"columns": "column_a, column_b", "unique": True, "type": "btree"}, + ] + assert len(indexes) == len(expected) + + def test_seed(self, project, unique_schema): + for additional_argument in [[], [], ["--full-refresh"]]: + results = run_dbt(["seed"] + additional_argument) + assert len(results) == 1 + + indexes = self.get_indexes("seed", project, unique_schema) + expected = [ + {"columns": "country_code", "unique": False, "type": "hash"}, + {"columns": "country_code, country_name", "unique": True, "type": "btree"}, + ] + assert len(indexes) == len(expected) + + def test_snapshot(self, project, unique_schema): + for version in [1, 2]: + results = run_dbt(["snapshot", "--vars", f"version: {version}"]) + assert len(results) == 1 + + indexes = self.get_indexes("colors", project, unique_schema) + expected = [ + {"columns": "id", "unique": False, "type": "hash"}, + {"columns": "id, color", "unique": True, "type": "btree"}, + ] + assert len(indexes) == len(expected) + + def get_indexes(self, table_name, project, unique_schema): + sql = f""" + SELECT + pg_get_indexdef(idx.indexrelid) as index_definition + FROM pg_index idx + JOIN pg_class tab ON tab.oid = idx.indrelid + WHERE + tab.relname = '{table_name}' + AND tab.relnamespace = ( + SELECT oid FROM pg_namespace WHERE nspname = '{unique_schema}' + ); + """ + results = project.run_sql(sql, fetch="all") + return [self.parse_index_definition(row[0]) for row in results] + + def parse_index_definition(self, index_definition): + index_definition = index_definition.lower() + is_unique = "unique" in index_definition + m = INDEX_DEFINITION_PATTERN.search(index_definition) + return { + "columns": m.group(2), + "unique": is_unique, + "type": m.group(1), + } + + def assertCountEqual(self, a, b): + assert len(a) == len(b) + + +class TestPostgresInvalidIndex: + @pytest.fixture(scope="class") + def models(self): + return { + "invalid_unique_config.sql": models_invalid__invalid_unique_config_sql, + "invalid_type.sql": models_invalid__invalid_type_sql, + "invalid_columns_type.sql": models_invalid__invalid_columns_type_sql, + "missing_columns.sql": models_invalid__missing_columns_sql, + } + + def test_invalid_index_configs(self, project): + results, output = run_dbt_and_capture(expect_pass=False) + assert len(results) == 4 + assert re.search(r"columns.*is not of type 'array'", output) + assert re.search(r"unique.*is not of type 'boolean'", output) + assert re.search(r"'columns' is a required property", output) + assert re.search(r"Database Error in model invalid_type", output) diff --git a/tests/functional/profiles/test_profile_dir.py b/tests/functional/profiles/test_profile_dir.py new file mode 100644 index 000000000..fbb39ed9c --- /dev/null +++ b/tests/functional/profiles/test_profile_dir.py @@ -0,0 +1,172 @@ +from contextlib import contextmanager +import os +from pathlib import Path + +import pytest +import yaml + +from dbt.tests.util import ( + rm_file, + run_dbt, + run_dbt_and_capture, + write_file, +) + + +@pytest.fixture(scope="class") +def profiles_yml(profiles_root, dbt_profile_data): + write_file(yaml.safe_dump(dbt_profile_data), profiles_root, "profiles.yml") + return dbt_profile_data + + +@pytest.fixture(scope="class") +def profiles_home_root(): + return os.path.join(os.path.expanduser("~"), ".dbt") + + +@pytest.fixture(scope="class") +def profiles_env_root(tmpdir_factory): + path = tmpdir_factory.mktemp("profile_env") + # environment variables are lowercased for some reason in _get_flag_value_from_env within dbt.flags + return str(path).lower() + + +@pytest.fixture(scope="class") +def profiles_flag_root(tmpdir_factory): + return tmpdir_factory.mktemp("profile_flag") + + +@pytest.fixture(scope="class") +def profiles_project_root(project): + return project.project_root + + +@pytest.fixture(scope="class") +def cwd(): + return os.getcwd() + + +@pytest.fixture(scope="class") +def cwd_parent(cwd): + return os.path.dirname(cwd) + + +@pytest.fixture(scope="class") +def cwd_child(): + # pick any child directory of the dbt project + return Path(os.getcwd()) / "macros" + + +@pytest.fixture +def write_profiles_yml(request): + def _write_profiles_yml(profiles_dir, dbt_profile_contents): + def cleanup(): + rm_file(Path(profiles_dir) / "profiles.yml") + + request.addfinalizer(cleanup) + write_file(yaml.safe_dump(dbt_profile_contents), profiles_dir, "profiles.yml") + + return _write_profiles_yml + + +# https://gist.github.com/igniteflow/7267431?permalink_comment_id=2551951#gistcomment-2551951 +@contextmanager +def environ(env): + """Temporarily set environment variables inside the context manager and + fully restore previous environment afterwards + """ + original_env = {key: os.getenv(key) for key in env} + os.environ.update(env) + try: + yield + finally: + for key, value in original_env.items(): + if value is None: + del os.environ[key] + else: + os.environ[key] = value + + +class TestProfilesMayNotExist: + def test_debug(self, project): + # The database will not be able to connect; expect neither a pass or a failure (but not an exception) + run_dbt(["debug", "--profiles-dir", "does_not_exist"], expect_pass=None) + + def test_deps(self, project): + run_dbt(["deps", "--profiles-dir", "does_not_exist"]) + + +class TestProfiles: + def dbt_debug(self, project_dir_cli_arg=None, profiles_dir_cli_arg=None): + # begin with no command-line args or user config (from profiles.yml) + # flags.set_from_args(Namespace(), {}) + command = ["debug"] + + if project_dir_cli_arg: + command.extend(["--project-dir", str(project_dir_cli_arg)]) + + if profiles_dir_cli_arg: + command.extend(["--profiles-dir", str(profiles_dir_cli_arg)]) + + # get the output of `dbt debug` regardless of the exit code + return run_dbt_and_capture(command, expect_pass=None) + + @pytest.mark.parametrize( + "project_dir_cli_arg, working_directory", + [ + # 3 different scenarios for `--project-dir` flag and current working directory + (None, "cwd"), # no --project-dir flag and cwd is project directory + (None, "cwd_child"), # no --project-dir flag and cwd is a project subdirectory + ("cwd", "cwd_parent"), # use --project-dir flag and cwd is outside of it + ], + ) + def test_profiles( + self, + project_dir_cli_arg, + working_directory, + write_profiles_yml, + dbt_profile_data, + profiles_home_root, + profiles_project_root, + profiles_flag_root, + profiles_env_root, + request, + ): + """Verify priority order to search for profiles.yml configuration. + + Reverse priority order: + 1. HOME directory + 2. DBT_PROFILES_DIR environment variable + 3. --profiles-dir command-line argument + + Specification later in this list will take priority over earlier ones, even when both are provided. + """ + + # https://pypi.org/project/pytest-lazy-fixture/ is an alternative to using request.getfixturevalue + if project_dir_cli_arg is not None: + project_dir_cli_arg = request.getfixturevalue(project_dir_cli_arg) + + if working_directory is not None: + working_directory = request.getfixturevalue(working_directory) + + # start in the specified directory + if working_directory is not None: + os.chdir(working_directory) + # default case with profiles.yml in the HOME directory + _, stdout = self.dbt_debug(project_dir_cli_arg) + assert f"Using profiles.yml file at {profiles_home_root}" in stdout + + # set DBT_PROFILES_DIR environment variable for the remainder of the cases + env_vars = {"DBT_PROFILES_DIR": profiles_env_root} + with environ(env_vars): + _, stdout = self.dbt_debug(project_dir_cli_arg) + assert f"Using profiles.yml file at {profiles_env_root}" in stdout + + # This additional case is also within the context manager because we want to verify + # that it takes priority even when the relevant environment variable is also set + + # set --profiles-dir on the command-line + _, stdout = self.dbt_debug( + project_dir_cli_arg, profiles_dir_cli_arg=profiles_flag_root + ) + assert f"Using profiles.yml file at {profiles_flag_root}" in stdout diff --git a/tests/functional/profiles/test_profiles_yml.py b/tests/functional/profiles/test_profiles_yml.py new file mode 100644 index 000000000..c4eeabbd8 --- /dev/null +++ b/tests/functional/profiles/test_profiles_yml.py @@ -0,0 +1,65 @@ +from pathlib import Path + +from dbt.cli.main import dbtRunner +from test_profile_dir import environ + + +jinjaesque_password = "no{{jinja{%re{#ndering" + +profile_with_jinjaesque_password = f"""test: + outputs: + default: + dbname: my_db + host: localhost + password: {jinjaesque_password} + port: 12345 + schema: dummy + threads: 4 + type: postgres + user: peter.webb + target: default +""" + +profile_with_env_password = """test: + outputs: + default: + dbname: my_db + host: localhost + password: "{{ env_var('DBT_PASSWORD') }}" + port: 12345 + schema: dummy + threads: 4 + type: postgres + user: peter.webb + target: default +""" + + +class TestProfileParsing: + def write_profiles_yml(self, profiles_root, content) -> None: + with open(Path(profiles_root, "profiles.yml"), "w") as profiles_yml: + profiles_yml.write(content) + + def test_password_not_jinja_rendered_when_invalid(self, project, profiles_root) -> None: + """Verifies that passwords that contain Jinja control characters, but which are + not valid Jinja, do not cause errors.""" + self.write_profiles_yml(profiles_root, profile_with_jinjaesque_password) + + events = [] + result = dbtRunner(callbacks=[events.append]).invoke(["parse"]) + assert result.success + + for e in events: + assert "no{{jinja{%re{#ndering" not in e.info.msg + + def test_password_jinja_rendered_when_valid(self, project, profiles_root) -> None: + """Verifies that a password value that is valid Jinja is rendered as such, + and that it doesn't cause problems if the resulting value looks like Jinja""" + self.write_profiles_yml(profiles_root, profile_with_env_password) + + events = [] + with environ({"DBT_PASSWORD": jinjaesque_password}): + result = dbtRunner(callbacks=[events.append]).invoke(["parse"]) + + assert result.success + assert project.adapter.config.credentials.password == jinjaesque_password diff --git a/tests/functional/projects/__init__.py b/tests/functional/projects/__init__.py new file mode 100644 index 000000000..3e12bd480 --- /dev/null +++ b/tests/functional/projects/__init__.py @@ -0,0 +1,3 @@ +from dbt_integration import dbt_integration +from graph_selection import GraphSelection +from jaffle_shop import JaffleShop diff --git a/tests/functional/projects/dbt_integration/__init__.py b/tests/functional/projects/dbt_integration/__init__.py new file mode 100644 index 000000000..86a21253a --- /dev/null +++ b/tests/functional/projects/dbt_integration/__init__.py @@ -0,0 +1,21 @@ +from functools import partial + +from tests.functional.projects.utils import read + + +read_macro = partial(read, project="dbt_integration", file_type="macros") +read_model = partial(read, project="dbt_integration", file_type="models") +read_schema = partial(read, project="dbt_integration", file_type="schemas") + + +def dbt_integration(): + return { + "dbt_project.yml": read_schema("project"), + "macros": {"do_something.sql": read_macro("do_something")}, + "models": { + "schema.yml": read_schema("schema"), + "incremental.sql": read_model("incremental"), + "table_model.sql": read_model("table"), + "view_model.sql": read_model("view"), + }, + } diff --git a/tests/functional/projects/dbt_integration/macros/do_something.sql b/tests/functional/projects/dbt_integration/macros/do_something.sql new file mode 100644 index 000000000..1ac21ce76 --- /dev/null +++ b/tests/functional/projects/dbt_integration/macros/do_something.sql @@ -0,0 +1,7 @@ +{% macro do_something(foo, bar) %} + + select + '{{ foo }}'::text as foo, + '{{ bar }}'::text as bar + +{% endmacro %} diff --git a/tests/functional/projects/dbt_integration/models/incremental.sql b/tests/functional/projects/dbt_integration/models/incremental.sql new file mode 100644 index 000000000..a6bf1c8f0 --- /dev/null +++ b/tests/functional/projects/dbt_integration/models/incremental.sql @@ -0,0 +1,10 @@ +{{ config( + materialized = 'incremental', + unique_key = 'id', +) }} + +select * from {{ this.schema }}.seed + +{% if is_incremental() %} + where updated_at > (select max(updated_at) from {{ this }}) +{% endif %} diff --git a/tests/functional/projects/dbt_integration/models/table.sql b/tests/functional/projects/dbt_integration/models/table.sql new file mode 100644 index 000000000..9562ae46e --- /dev/null +++ b/tests/functional/projects/dbt_integration/models/table.sql @@ -0,0 +1,2 @@ +{{ config(materialized = 'table') }} +select * from {{ this.schema }}.seed diff --git a/tests/functional/projects/dbt_integration/models/view.sql b/tests/functional/projects/dbt_integration/models/view.sql new file mode 100644 index 000000000..dd49a1d69 --- /dev/null +++ b/tests/functional/projects/dbt_integration/models/view.sql @@ -0,0 +1,2 @@ +{{ config(materialized = 'view') }} +select * from {{ this.schema }}.seed diff --git a/tests/functional/projects/dbt_integration/schemas/project.yml b/tests/functional/projects/dbt_integration/schemas/project.yml new file mode 100644 index 000000000..13a01e894 --- /dev/null +++ b/tests/functional/projects/dbt_integration/schemas/project.yml @@ -0,0 +1,16 @@ +name: dbt_integration_project +version: '1.0' +config-version: 2 + +model-paths: ["models"] # paths to models +analysis-paths: ["analyses"] # path with analysis files which are compiled, but not run +target-path: "target" # path for compiled code +clean-targets: ["target"] # directories removed by the clean task +test-paths: ["tests"] # where to store test results +seed-paths: ["seeds"] # load CSVs from this directory with `dbt seed` +macro-paths: ["macros"] # where to find macros + +profile: user + +models: + dbt_integration_project: diff --git a/tests/functional/projects/dbt_integration/schemas/schema.yml b/tests/functional/projects/dbt_integration/schemas/schema.yml new file mode 100644 index 000000000..b5105a281 --- /dev/null +++ b/tests/functional/projects/dbt_integration/schemas/schema.yml @@ -0,0 +1,8 @@ +version: 2 + +models: +- name: table_model + columns: + - name: id + data_tests: + - unique diff --git a/tests/functional/projects/graph_selection/__init__.py b/tests/functional/projects/graph_selection/__init__.py new file mode 100644 index 000000000..d21ba96f7 --- /dev/null +++ b/tests/functional/projects/graph_selection/__init__.py @@ -0,0 +1,44 @@ +from functools import partial + +import pytest + +from tests.functional.projects.utils import read + + +read_data = partial(read, project="graph_selection", file_type="data") +read_model = partial(read, project="graph_selection", file_type="models") +read_schema = partial(read, project="graph_selection", file_type="schemas") + + +class GraphSelection: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": read_schema("schema"), + "patch_path_selection_schema.yml": read_schema("patch_path_selection"), + "base_users.sql": read_model("base_users"), + "users.sql": read_model("users"), + "versioned_v3.sql": read_model("base_users"), + "users_rollup.sql": read_model("users_rollup"), + "users_rollup_dependency.sql": read_model("users_rollup_dependency"), + "emails.sql": read_model("emails"), + "emails_alt.sql": read_model("emails_alt"), + "alternative.users.sql": read_model("alternative_users"), + "never_selected.sql": read_model("never_selected"), + "test": { + "subdir.sql": read_model("subdir"), + "versioned_v2.sql": read_model("subdir"), + "subdir": { + "nested_users.sql": read_model("nested_users"), + "versioned_v1.sql": read_model("nested_users"), + }, + }, + } + + @pytest.fixture(scope="class") + def seeds(self, test_data_dir): + return { + "properties.yml": read_schema("properties"), + "seed.csv": read_data("seed"), + "summary_expected.csv": read_data("summary_expected"), + } diff --git a/tests/functional/projects/graph_selection/data/seed.csv b/tests/functional/projects/graph_selection/data/seed.csv new file mode 100644 index 000000000..b77dad1bd --- /dev/null +++ b/tests/functional/projects/graph_selection/data/seed.csv @@ -0,0 +1,101 @@ +id,first_name,last_name,email,gender,ip_address,updated_at +1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168,2015-12-24 12:19:28 +2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35,2015-12-24 12:19:28 +3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243,2015-12-24 12:19:28 +4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175,2015-12-24 12:19:28 +5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136,2015-12-24 12:19:28 +6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220,2015-12-24 12:19:28 +7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64,2015-12-24 12:19:28 +8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13,2015-12-24 12:19:28 +9,Gary,Day,gday8@nih.gov,Male,35.81.68.186,2015-12-24 12:19:28 +10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100,2015-12-24 12:19:28 +11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67,2015-12-24 12:19:28 +12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193,2015-12-24 12:19:28 +13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5,2015-12-24 12:19:28 +14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250,2015-12-24 12:19:28 +15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245,2015-12-24 12:19:28 +16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54,2015-12-24 12:19:28 +17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96,2015-12-24 12:19:28 +18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72,2015-12-24 12:19:28 +19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174,2015-12-24 12:19:28 +20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25,2015-12-24 12:19:28 +21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253,2015-12-24 12:19:28 +22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153,2015-12-24 12:19:28 +23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201,2015-12-24 12:19:28 +24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122,2015-12-24 12:19:28 +25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95,2015-12-24 12:19:28 +26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52,2015-12-24 12:19:28 +27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26,2015-12-24 12:19:28 +28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118,2015-12-24 12:19:28 +29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28,2015-12-24 12:19:28 +30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177,2015-12-24 12:19:28 +31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233,2015-12-24 12:19:28 +32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203,2015-12-24 12:19:28 +33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149,2015-12-24 12:19:28 +34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167,2015-12-24 12:19:28 +35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110,2015-12-24 12:19:28 +36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68,2015-12-24 12:19:28 +37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89,2015-12-24 12:19:28 +38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81,2015-12-24 12:19:28 +39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15,2015-12-24 12:19:28 +40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255,2015-12-24 12:19:28 +41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140,2015-12-24 12:19:28 +42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24,2015-12-24 12:19:28 +43,Sean,Mason,smason16@icq.com,Male,159.219.155.249,2015-12-24 12:19:28 +44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218,2015-12-24 12:19:28 +45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198,2015-12-24 12:19:28 +46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18,2015-12-24 12:19:28 +47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238,2015-12-24 12:19:28 +48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61,2015-12-24 12:19:28 +49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21,2015-12-24 12:19:28 +50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209,2015-12-24 12:19:28 +51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87,2015-12-24 12:19:28 +52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142,2015-12-24 12:19:28 +53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126,2015-12-24 12:19:28 +54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212,2015-12-24 12:19:28 +55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194,2015-12-24 12:19:28 +56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22,2015-12-24 12:19:28 +57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60,2015-12-24 12:19:28 +58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50,2015-12-24 12:19:28 +59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222,2015-12-24 12:19:28 +60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115,2015-12-24 12:19:28 +61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155,2015-12-24 12:19:28 +62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94,2015-12-24 12:19:28 +63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106,2015-12-24 12:19:28 +64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68,2015-12-24 12:19:28 +65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41,2015-12-24 12:19:28 +66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109,2015-12-24 12:19:28 +67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77,2015-12-24 12:19:28 +68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194,2015-12-24 12:19:28 +69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135,2015-12-24 12:19:28 +70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87,2015-12-24 12:19:28 +71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44,2015-12-24 12:19:28 +72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182,2015-12-24 12:19:28 +73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241,2015-12-24 12:19:28 +74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24,2015-12-24 12:19:28 +75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214,2015-12-24 12:19:28 +76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199,2015-12-24 12:19:28 +77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41,2015-12-24 12:19:28 +78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255,2015-12-24 12:19:28 +79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144,2015-12-24 12:19:28 +80,Rose,King,rking27@ucoz.com,Female,212.123.168.231,2015-12-24 12:19:28 +81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188,2015-12-24 12:19:28 +82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61,2015-12-24 12:19:28 +83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30,2015-12-24 12:19:28 +84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192,2015-12-24 12:19:28 +85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232,2015-12-24 12:19:28 +86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109,2015-12-24 12:19:28 +87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156,2015-12-24 12:19:28 +88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84,2015-12-24 12:19:28 +89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235,2015-12-24 12:19:28 +90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53,2015-12-24 12:19:28 +91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221,2015-12-24 12:19:28 +92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187,2015-12-24 12:19:28 +93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57,2015-12-24 12:19:28 +94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189,2015-12-24 12:19:28 +95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180,2015-12-24 12:19:28 +96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144,2015-12-24 12:19:28 +97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117,2015-12-24 12:19:28 +98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126,2015-12-24 12:19:28 +99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244,2015-12-24 12:19:28 +100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88,2015-12-24 12:19:28 diff --git a/tests/functional/projects/graph_selection/data/summary_expected.csv b/tests/functional/projects/graph_selection/data/summary_expected.csv new file mode 100644 index 000000000..0d938030d --- /dev/null +++ b/tests/functional/projects/graph_selection/data/summary_expected.csv @@ -0,0 +1,3 @@ +gender,ct +Female,40 +Male,60 diff --git a/tests/functional/projects/graph_selection/models/alternative_users.sql b/tests/functional/projects/graph_selection/models/alternative_users.sql new file mode 100644 index 000000000..75c67dddd --- /dev/null +++ b/tests/functional/projects/graph_selection/models/alternative_users.sql @@ -0,0 +1,7 @@ +{# Same as ´users´ model, but with dots in the model name #} +{{ config( + materialized = 'table', + tags=['dots'] +) }} + +select * from {{ ref('base_users') }} diff --git a/tests/functional/projects/graph_selection/models/base_users.sql b/tests/functional/projects/graph_selection/models/base_users.sql new file mode 100644 index 000000000..9fe8d40a0 --- /dev/null +++ b/tests/functional/projects/graph_selection/models/base_users.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'ephemeral', + tags = ['base'] +) }} + +select * from {{ source('raw', 'seed') }} diff --git a/tests/functional/projects/graph_selection/models/emails.sql b/tests/functional/projects/graph_selection/models/emails.sql new file mode 100644 index 000000000..9dc7a8ebe --- /dev/null +++ b/tests/functional/projects/graph_selection/models/emails.sql @@ -0,0 +1,6 @@ +{{ config( + materialized='ephemeral', + tags=['base'] +) }} + +select distinct email from {{ ref('base_users') }} diff --git a/tests/functional/projects/graph_selection/models/emails_alt.sql b/tests/functional/projects/graph_selection/models/emails_alt.sql new file mode 100644 index 000000000..b0aebc77f --- /dev/null +++ b/tests/functional/projects/graph_selection/models/emails_alt.sql @@ -0,0 +1 @@ +select distinct email from {{ ref('users') }} diff --git a/tests/functional/projects/graph_selection/models/nested_users.sql b/tests/functional/projects/graph_selection/models/nested_users.sql new file mode 100644 index 000000000..43258a714 --- /dev/null +++ b/tests/functional/projects/graph_selection/models/nested_users.sql @@ -0,0 +1 @@ +select 1 as id diff --git a/tests/functional/projects/graph_selection/models/never_selected.sql b/tests/functional/projects/graph_selection/models/never_selected.sql new file mode 100644 index 000000000..ab9c6bb13 --- /dev/null +++ b/tests/functional/projects/graph_selection/models/never_selected.sql @@ -0,0 +1,2 @@ +{{ config(schema='_and_then') }} +select * from {{ this.schema }}.seed diff --git a/tests/functional/projects/graph_selection/models/subdir.sql b/tests/functional/projects/graph_selection/models/subdir.sql new file mode 100644 index 000000000..43258a714 --- /dev/null +++ b/tests/functional/projects/graph_selection/models/subdir.sql @@ -0,0 +1 @@ +select 1 as id diff --git a/tests/functional/projects/graph_selection/models/users.sql b/tests/functional/projects/graph_selection/models/users.sql new file mode 100644 index 000000000..dfd35df52 --- /dev/null +++ b/tests/functional/projects/graph_selection/models/users.sql @@ -0,0 +1,6 @@ +{{ config( + materialized = 'table', + tags=['bi', 'users'] +) }} + +select * from {{ ref('base_users') }} diff --git a/tests/functional/projects/graph_selection/models/users_rollup.sql b/tests/functional/projects/graph_selection/models/users_rollup.sql new file mode 100644 index 000000000..2f349dab3 --- /dev/null +++ b/tests/functional/projects/graph_selection/models/users_rollup.sql @@ -0,0 +1,14 @@ +{{ config( + materialized = 'view', + tags = 'bi' +) }} + +with users as ( + select * from {{ ref('users') }} +) + +select + gender, + count(*) as ct +from users +group by 1 diff --git a/tests/functional/projects/graph_selection/models/users_rollup_dependency.sql b/tests/functional/projects/graph_selection/models/users_rollup_dependency.sql new file mode 100644 index 000000000..b0aa8e3cb --- /dev/null +++ b/tests/functional/projects/graph_selection/models/users_rollup_dependency.sql @@ -0,0 +1,2 @@ +{{ config(materialized='table') }} +select * from {{ ref('users_rollup') }} diff --git a/tests/functional/projects/graph_selection/schemas/patch_path_selection.yml b/tests/functional/projects/graph_selection/schemas/patch_path_selection.yml new file mode 100644 index 000000000..9d95aef9f --- /dev/null +++ b/tests/functional/projects/graph_selection/schemas/patch_path_selection.yml @@ -0,0 +1,5 @@ +version: 2 + +models: + - name: subdir + description: submarine sandwich directory diff --git a/tests/functional/projects/graph_selection/schemas/properties.yml b/tests/functional/projects/graph_selection/schemas/properties.yml new file mode 100644 index 000000000..b771566a2 --- /dev/null +++ b/tests/functional/projects/graph_selection/schemas/properties.yml @@ -0,0 +1,8 @@ +version: 2 + +seeds: + - name: summary_expected + config: + column_types: + ct: BIGINT + gender: text diff --git a/tests/functional/projects/graph_selection/schemas/schema.yml b/tests/functional/projects/graph_selection/schemas/schema.yml new file mode 100644 index 000000000..2aab967a6 --- /dev/null +++ b/tests/functional/projects/graph_selection/schemas/schema.yml @@ -0,0 +1,78 @@ +version: 2 + +groups: + - name: emails_group + owner: + name: Jeremy + email: data@jer.co + slack: talk-jerco-memes + github: jtcohen6 + whatever: you want + - name: users_group + owner: + name: Jeremy + email: data@jer.co + slack: talk-jerco-memes + github: jtcohen6 + whatever: you want + - name: users_rollup_group + owner: + name: Jeremy + email: data@jer.co + slack: talk-jerco-memes + github: jtcohen6 + whatever: you want + +models: + - name: emails + group: emails_group + columns: + - name: email + data_tests: + - not_null: + severity: warn + - name: users + group: users_group + columns: + - name: id + data_tests: + - unique + - name: users_rollup + group: users_rollup_group + columns: + - name: gender + data_tests: + - unique + - name: versioned + latest_version: 2 + versions: + - v: 0 + - v: 1 + - v: 2 + - v: 3 + - v: 4.5 + - v: "5.0" + - v: 21 + - v: "test" + +sources: + - name: raw + schema: '{{ target.schema }}' + tables: + - name: seed + +exposures: + - name: user_exposure + type: dashboard + depends_on: + - ref('users') + - ref('users_rollup') + - ref('versioned', v=3) + owner: + email: nope@example.com + - name: seed_ml_exposure + type: ml + depends_on: + - source('raw', 'seed') + owner: + email: nope@example.com diff --git a/tests/functional/projects/jaffle_shop/__init__.py b/tests/functional/projects/jaffle_shop/__init__.py new file mode 100644 index 000000000..5a84ff65b --- /dev/null +++ b/tests/functional/projects/jaffle_shop/__init__.py @@ -0,0 +1,58 @@ +from functools import partial + +import pytest + +from tests.functional.projects.utils import read + + +read_data = partial(read, project="jaffle_shop", file_type="data") +read_doc = partial(read, project="jaffle_shop", file_type="docs") +read_model = partial(read, project="jaffle_shop", file_type="models") +read_schema = partial(read, project="jaffle_shop", file_type="schemas") +read_staging = partial(read, project="jaffle_shop", file_type="staging") + + +class JaffleShop: + @pytest.fixture(scope="class") + def models(self): + return { + "customers.sql": read_model("customers"), + "docs.md": read_doc("docs"), + "orders.sql": read_model("orders"), + "ignored_model1.sql": "select 1 as id", + "ignored_model2.sql": "select 1 as id", + "overview.md": read_doc("overview"), + "schema.yml": read_schema("jaffle_shop"), + "ignore_folder": { + "model1.sql": "select 1 as id", + "model2.sql": "select 1 as id", + }, + "staging": { + "schema.yml": read_schema("staging"), + "stg_customers.sql": read_staging("stg_customers"), + "stg_orders.sql": read_staging("stg_orders"), + "stg_payments.sql": read_staging("stg_payments"), + }, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "raw_customers.csv": read_data("raw_customers"), + "raw_orders.csv": read_data("raw_orders"), + "raw_payments.csv": read_data("raw_payments"), + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "name": "jaffle_shop", + "models": { + "jaffle_shop": { + "materialized": "table", + "staging": { + "materialized": "view", + }, + } + }, + } diff --git a/tests/functional/projects/jaffle_shop/data/raw_customers.csv b/tests/functional/projects/jaffle_shop/data/raw_customers.csv new file mode 100644 index 000000000..b3e6747d6 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/data/raw_customers.csv @@ -0,0 +1,101 @@ +id,first_name,last_name +1,Michael,P. +2,Shawn,M. +3,Kathleen,P. +4,Jimmy,C. +5,Katherine,R. +6,Sarah,R. +7,Martin,M. +8,Frank,R. +9,Jennifer,F. +10,Henry,W. +11,Fred,S. +12,Amy,D. +13,Kathleen,M. +14,Steve,F. +15,Teresa,H. +16,Amanda,H. +17,Kimberly,R. +18,Johnny,K. +19,Virginia,F. +20,Anna,A. +21,Willie,H. +22,Sean,H. +23,Mildred,A. +24,David,G. +25,Victor,H. +26,Aaron,R. +27,Benjamin,B. +28,Lisa,W. +29,Benjamin,K. +30,Christina,W. +31,Jane,G. +32,Thomas,O. +33,Katherine,M. +34,Jennifer,S. +35,Sara,T. +36,Harold,O. +37,Shirley,J. +38,Dennis,J. +39,Louise,W. +40,Maria,A. +41,Gloria,C. +42,Diana,S. +43,Kelly,N. +44,Jane,R. +45,Scott,B. +46,Norma,C. +47,Marie,P. +48,Lillian,C. +49,Judy,N. +50,Billy,L. +51,Howard,R. +52,Laura,F. +53,Anne,B. +54,Rose,M. +55,Nicholas,R. +56,Joshua,K. +57,Paul,W. +58,Kathryn,K. +59,Adam,A. +60,Norma,W. +61,Timothy,R. +62,Elizabeth,P. +63,Edward,G. +64,David,C. +65,Brenda,W. +66,Adam,W. +67,Michael,H. +68,Jesse,E. +69,Janet,P. +70,Helen,F. +71,Gerald,C. +72,Kathryn,O. +73,Alan,B. +74,Harry,A. +75,Andrea,H. +76,Barbara,W. +77,Anne,W. +78,Harry,H. +79,Jack,R. +80,Phillip,H. +81,Shirley,H. +82,Arthur,D. +83,Virginia,R. +84,Christina,R. +85,Theresa,M. +86,Jason,C. +87,Phillip,B. +88,Adam,T. +89,Margaret,J. +90,Paul,P. +91,Todd,W. +92,Willie,O. +93,Frances,R. +94,Gregory,H. +95,Lisa,P. +96,Jacqueline,A. +97,Shirley,D. +98,Nicole,M. +99,Mary,G. +100,Jean,M. diff --git a/tests/functional/projects/jaffle_shop/data/raw_orders.csv b/tests/functional/projects/jaffle_shop/data/raw_orders.csv new file mode 100644 index 000000000..c4870621b --- /dev/null +++ b/tests/functional/projects/jaffle_shop/data/raw_orders.csv @@ -0,0 +1,100 @@ +id,user_id,order_date,status +1,1,2018-01-01,returned +2,3,2018-01-02,completed +3,94,2018-01-04,completed +4,50,2018-01-05,completed +5,64,2018-01-05,completed +6,54,2018-01-07,completed +7,88,2018-01-09,completed +8,2,2018-01-11,returned +9,53,2018-01-12,completed +10,7,2018-01-14,completed +11,99,2018-01-14,completed +12,59,2018-01-15,completed +13,84,2018-01-17,completed +14,40,2018-01-17,returned +15,25,2018-01-17,completed +16,39,2018-01-18,completed +17,71,2018-01-18,completed +18,64,2018-01-20,returned +19,54,2018-01-22,completed +20,20,2018-01-23,completed +21,71,2018-01-23,completed +22,86,2018-01-24,completed +23,22,2018-01-26,return_pending +24,3,2018-01-27,completed +25,51,2018-01-28,completed +26,32,2018-01-28,completed +27,94,2018-01-29,completed +28,8,2018-01-29,completed +29,57,2018-01-31,completed +30,69,2018-02-02,completed +31,16,2018-02-02,completed +32,28,2018-02-04,completed +33,42,2018-02-04,completed +34,38,2018-02-06,completed +35,80,2018-02-08,completed +36,85,2018-02-10,completed +37,1,2018-02-10,completed +38,51,2018-02-10,completed +39,26,2018-02-11,completed +40,33,2018-02-13,completed +41,99,2018-02-14,completed +42,92,2018-02-16,completed +43,31,2018-02-17,completed +44,66,2018-02-17,completed +45,22,2018-02-17,completed +46,6,2018-02-19,completed +47,50,2018-02-20,completed +48,27,2018-02-21,completed +49,35,2018-02-21,completed +50,51,2018-02-23,completed +51,71,2018-02-24,completed +52,54,2018-02-25,return_pending +53,34,2018-02-26,completed +54,54,2018-02-26,completed +55,18,2018-02-27,completed +56,79,2018-02-28,completed +57,93,2018-03-01,completed +58,22,2018-03-01,completed +59,30,2018-03-02,completed +60,12,2018-03-03,completed +61,63,2018-03-03,completed +62,57,2018-03-05,completed +63,70,2018-03-06,completed +64,13,2018-03-07,completed +65,26,2018-03-08,completed +66,36,2018-03-10,completed +67,79,2018-03-11,completed +68,53,2018-03-11,completed +69,3,2018-03-11,completed +70,8,2018-03-12,completed +71,42,2018-03-12,shipped +72,30,2018-03-14,shipped +73,19,2018-03-16,completed +74,9,2018-03-17,shipped +75,69,2018-03-18,completed +76,25,2018-03-20,completed +77,35,2018-03-21,shipped +78,90,2018-03-23,shipped +79,52,2018-03-23,shipped +80,11,2018-03-23,shipped +81,76,2018-03-23,shipped +82,46,2018-03-24,shipped +83,54,2018-03-24,shipped +84,70,2018-03-26,placed +85,47,2018-03-26,shipped +86,68,2018-03-26,placed +87,46,2018-03-27,placed +88,91,2018-03-27,shipped +89,21,2018-03-28,placed +90,66,2018-03-30,shipped +91,47,2018-03-31,placed +92,84,2018-04-02,placed +93,66,2018-04-03,placed +94,63,2018-04-03,placed +95,27,2018-04-04,placed +96,90,2018-04-06,placed +97,89,2018-04-07,placed +98,41,2018-04-07,placed +99,85,2018-04-09,placed diff --git a/tests/functional/projects/jaffle_shop/data/raw_payments.csv b/tests/functional/projects/jaffle_shop/data/raw_payments.csv new file mode 100644 index 000000000..a587baab5 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/data/raw_payments.csv @@ -0,0 +1,114 @@ +id,order_id,payment_method,amount +1,1,credit_card,1000 +2,2,credit_card,2000 +3,3,coupon,100 +4,4,coupon,2500 +5,5,bank_transfer,1700 +6,6,credit_card,600 +7,7,credit_card,1600 +8,8,credit_card,2300 +9,9,gift_card,2300 +10,9,bank_transfer,0 +11,10,bank_transfer,2600 +12,11,credit_card,2700 +13,12,credit_card,100 +14,13,credit_card,500 +15,13,bank_transfer,1400 +16,14,bank_transfer,300 +17,15,coupon,2200 +18,16,credit_card,1000 +19,17,bank_transfer,200 +20,18,credit_card,500 +21,18,credit_card,800 +22,19,gift_card,600 +23,20,bank_transfer,1500 +24,21,credit_card,1200 +25,22,bank_transfer,800 +26,23,gift_card,2300 +27,24,coupon,2600 +28,25,bank_transfer,2000 +29,25,credit_card,2200 +30,25,coupon,1600 +31,26,credit_card,3000 +32,27,credit_card,2300 +33,28,bank_transfer,1900 +34,29,bank_transfer,1200 +35,30,credit_card,1300 +36,31,credit_card,1200 +37,32,credit_card,300 +38,33,credit_card,2200 +39,34,bank_transfer,1500 +40,35,credit_card,2900 +41,36,bank_transfer,900 +42,37,credit_card,2300 +43,38,credit_card,1500 +44,39,bank_transfer,800 +45,40,credit_card,1400 +46,41,credit_card,1700 +47,42,coupon,1700 +48,43,gift_card,1800 +49,44,gift_card,1100 +50,45,bank_transfer,500 +51,46,bank_transfer,800 +52,47,credit_card,2200 +53,48,bank_transfer,300 +54,49,credit_card,600 +55,49,credit_card,900 +56,50,credit_card,2600 +57,51,credit_card,2900 +58,51,credit_card,100 +59,52,bank_transfer,1500 +60,53,credit_card,300 +61,54,credit_card,1800 +62,54,bank_transfer,1100 +63,55,credit_card,2900 +64,56,credit_card,400 +65,57,bank_transfer,200 +66,58,coupon,1800 +67,58,gift_card,600 +68,59,gift_card,2800 +69,60,credit_card,400 +70,61,bank_transfer,1600 +71,62,gift_card,1400 +72,63,credit_card,2900 +73,64,bank_transfer,2600 +74,65,credit_card,0 +75,66,credit_card,2800 +76,67,bank_transfer,400 +77,67,credit_card,1900 +78,68,credit_card,1600 +79,69,credit_card,1900 +80,70,credit_card,2600 +81,71,credit_card,500 +82,72,credit_card,2900 +83,73,bank_transfer,300 +84,74,credit_card,3000 +85,75,credit_card,1900 +86,76,coupon,200 +87,77,credit_card,0 +88,77,bank_transfer,1900 +89,78,bank_transfer,2600 +90,79,credit_card,1800 +91,79,credit_card,900 +92,80,gift_card,300 +93,81,coupon,200 +94,82,credit_card,800 +95,83,credit_card,100 +96,84,bank_transfer,2500 +97,85,bank_transfer,1700 +98,86,coupon,2300 +99,87,gift_card,3000 +100,87,credit_card,2600 +101,88,credit_card,2900 +102,89,bank_transfer,2200 +103,90,bank_transfer,200 +104,91,credit_card,1900 +105,92,bank_transfer,1500 +106,92,coupon,200 +107,93,gift_card,2600 +108,94,coupon,700 +109,95,coupon,2400 +110,96,gift_card,1700 +111,97,bank_transfer,1400 +112,98,bank_transfer,1000 +113,99,credit_card,2400 diff --git a/tests/functional/projects/jaffle_shop/docs/docs.md b/tests/functional/projects/jaffle_shop/docs/docs.md new file mode 100644 index 000000000..c6ae93be0 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/docs/docs.md @@ -0,0 +1,14 @@ +{% docs orders_status %} + +Orders can be one of the following statuses: + +| status | description | +|----------------|------------------------------------------------------------------------------------------------------------------------| +| placed | The order has been placed but has not yet left the warehouse | +| shipped | The order has ben shipped to the customer and is currently in transit | +| completed | The order has been received by the customer | +| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse | +| returned | The order has been returned by the customer and received at the warehouse | + + +{% enddocs %} diff --git a/tests/functional/projects/jaffle_shop/docs/overview.md b/tests/functional/projects/jaffle_shop/docs/overview.md new file mode 100644 index 000000000..0544c42b1 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/docs/overview.md @@ -0,0 +1,11 @@ +{% docs __overview__ %} + +## Data Documentation for Jaffle Shop + +`jaffle_shop` is a fictional ecommerce store. + +This [dbt](https://www.getdbt.com/) project is for testing out code. + +The source code can be found [here](https://github.com/clrcrl/jaffle_shop). + +{% enddocs %} diff --git a/tests/functional/projects/jaffle_shop/models/customers.sql b/tests/functional/projects/jaffle_shop/models/customers.sql new file mode 100644 index 000000000..016a004fe --- /dev/null +++ b/tests/functional/projects/jaffle_shop/models/customers.sql @@ -0,0 +1,69 @@ +with customers as ( + + select * from {{ ref('stg_customers') }} + +), + +orders as ( + + select * from {{ ref('stg_orders') }} + +), + +payments as ( + + select * from {{ ref('stg_payments') }} + +), + +customer_orders as ( + + select + customer_id, + + min(order_date) as first_order, + max(order_date) as most_recent_order, + count(order_id) as number_of_orders + from orders + + group by customer_id + +), + +customer_payments as ( + + select + orders.customer_id, + sum(amount) as total_amount + + from payments + + left join orders on + payments.order_id = orders.order_id + + group by orders.customer_id + +), + +final as ( + + select + customers.customer_id, + customers.first_name, + customers.last_name, + customer_orders.first_order, + customer_orders.most_recent_order, + customer_orders.number_of_orders, + customer_payments.total_amount as customer_lifetime_value + + from customers + + left join customer_orders + on customers.customer_id = customer_orders.customer_id + + left join customer_payments + on customers.customer_id = customer_payments.customer_id + +) + +select * from final diff --git a/tests/functional/projects/jaffle_shop/models/orders.sql b/tests/functional/projects/jaffle_shop/models/orders.sql new file mode 100644 index 000000000..cbb293491 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/models/orders.sql @@ -0,0 +1,56 @@ +{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %} + +with orders as ( + + select * from {{ ref('stg_orders') }} + +), + +payments as ( + + select * from {{ ref('stg_payments') }} + +), + +order_payments as ( + + select + order_id, + + {% for payment_method in payment_methods -%} + sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount, + {% endfor -%} + + sum(amount) as total_amount + + from payments + + group by order_id + +), + +final as ( + + select + orders.order_id, + orders.customer_id, + orders.order_date, + orders.status, + + {% for payment_method in payment_methods -%} + + order_payments.{{ payment_method }}_amount, + + {% endfor -%} + + order_payments.total_amount as amount + + from orders + + + left join order_payments + on orders.order_id = order_payments.order_id + +) + +select * from final diff --git a/tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml b/tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml new file mode 100644 index 000000000..6c71616ac --- /dev/null +++ b/tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml @@ -0,0 +1,82 @@ +version: 2 + +models: + - name: customers + description: This table has basic information about a customer, as well as some derived facts based on a customer's orders + + columns: + - name: customer_id + description: This is a unique identifier for a customer + data_tests: + - unique + - not_null + + - name: first_name + description: Customer's first name. PII. + + - name: last_name + description: Customer's last name. PII. + + - name: first_order + description: Date (UTC) of a customer's first order + + - name: most_recent_order + description: Date (UTC) of a customer's most recent order + + - name: number_of_orders + description: Count of the number of orders a customer has placed + + - name: total_order_amount + description: Total value (AUD) of a customer's orders + + - name: orders + description: This table has basic information about orders, as well as some derived facts based on payments + + columns: + - name: order_id + data_tests: + - unique + - not_null + description: This is a unique identifier for an order + + - name: customer_id + description: Foreign key to the customers table + data_tests: + - not_null + - relationships: + to: ref('customers') + field: customer_id + + - name: order_date + description: Date (UTC) that the order was placed + + - name: status + description: '{{ doc("orders_status") }}' + data_tests: + - accepted_values: + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + + - name: amount + description: Total amount (AUD) of the order + data_tests: + - not_null + + - name: credit_card_amount + description: Amount of the order (AUD) paid for by credit card + data_tests: + - not_null + + - name: coupon_amount + description: Amount of the order (AUD) paid for by coupon + data_tests: + - not_null + + - name: bank_transfer_amount + description: Amount of the order (AUD) paid for by bank transfer + data_tests: + - not_null + + - name: gift_card_amount + description: Amount of the order (AUD) paid for by gift card + data_tests: + - not_null diff --git a/tests/functional/projects/jaffle_shop/schemas/staging.yml b/tests/functional/projects/jaffle_shop/schemas/staging.yml new file mode 100644 index 000000000..f47b0f6d4 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/schemas/staging.yml @@ -0,0 +1,31 @@ +version: 2 + +models: + - name: stg_customers + columns: + - name: customer_id + data_tests: + - unique + - not_null + + - name: stg_orders + columns: + - name: order_id + data_tests: + - unique + - not_null + - name: status + data_tests: + - accepted_values: + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + + - name: stg_payments + columns: + - name: payment_id + data_tests: + - unique + - not_null + - name: payment_method + data_tests: + - accepted_values: + values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] diff --git a/tests/functional/projects/jaffle_shop/staging/stg_customers.sql b/tests/functional/projects/jaffle_shop/staging/stg_customers.sql new file mode 100644 index 000000000..cad047269 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/staging/stg_customers.sql @@ -0,0 +1,22 @@ +with source as ( + + {#- + Normally we would select from the table here, but we are using seeds to load + our data in this project + #} + select * from {{ ref('raw_customers') }} + +), + +renamed as ( + + select + id as customer_id, + first_name, + last_name + + from source + +) + +select * from renamed diff --git a/tests/functional/projects/jaffle_shop/staging/stg_orders.sql b/tests/functional/projects/jaffle_shop/staging/stg_orders.sql new file mode 100644 index 000000000..a654dcb94 --- /dev/null +++ b/tests/functional/projects/jaffle_shop/staging/stg_orders.sql @@ -0,0 +1,23 @@ +with source as ( + + {#- + Normally we would select from the table here, but we are using seeds to load + our data in this project + #} + select * from {{ ref('raw_orders') }} + +), + +renamed as ( + + select + id as order_id, + user_id as customer_id, + order_date, + status + + from source + +) + +select * from renamed diff --git a/tests/functional/projects/jaffle_shop/staging/stg_payments.sql b/tests/functional/projects/jaffle_shop/staging/stg_payments.sql new file mode 100644 index 000000000..f718596ad --- /dev/null +++ b/tests/functional/projects/jaffle_shop/staging/stg_payments.sql @@ -0,0 +1,25 @@ +with source as ( + + {#- + Normally we would select from the table here, but we are using seeds to load + our data in this project + #} + select * from {{ ref('raw_payments') }} + +), + +renamed as ( + + select + id as payment_id, + order_id, + payment_method, + + -- `amount` is currently stored in cents, so we convert it to dollars + amount / 100 as amount + + from source + +) + +select * from renamed diff --git a/tests/functional/projects/utils.py b/tests/functional/projects/utils.py new file mode 100644 index 000000000..51682d2c5 --- /dev/null +++ b/tests/functional/projects/utils.py @@ -0,0 +1,20 @@ +from pathlib import Path + + +FILE_TYPES = { + "data": "csv", + "docs": "md", + "models": "sql", + "schemas": "yml", + "staging": "sql", +} + + +def read(project: str, file_type: str, file_name: str) -> str: + root = Path(__file__) / project + extension = FILE_TYPES[file_type] + file = root / file_type / f"{file_name}.{extension}" + contents = file.read_text() + if file_type == "data": + return contents.strip() + return contents diff --git a/tests/functional/retry/fixtures.py b/tests/functional/retry/fixtures.py new file mode 100644 index 000000000..64adf9c68 --- /dev/null +++ b/tests/functional/retry/fixtures.py @@ -0,0 +1,60 @@ +models__sample_model = """select 1 as id, baz as foo""" +models__second_model = """select 1 as id, 2 as bar""" + +models__union_model = """ +select foo + bar as sum3 from {{ ref('sample_model') }} +left join {{ ref('second_model') }} on sample_model.id = second_model.id +""" + +schema_yml = """ +models: + - name: sample_model + columns: + - name: foo + data_tests: + - accepted_values: + values: [3] + quote: false + config: + severity: warn + - name: second_model + columns: + - name: bar + data_tests: + - accepted_values: + values: [3] + quote: false + config: + severity: warn + - name: union_model + columns: + - name: sum3 + data_tests: + - accepted_values: + values: [3] + quote: false +""" + +macros__alter_timezone_sql = """ +{% macro alter_timezone(timezone='America/Los_Angeles') %} +{% set sql %} + SET TimeZone='{{ timezone }}'; +{% endset %} + +{% do run_query(sql) %} +{% do log("Timezone set to: " + timezone, info=True) %} +{% endmacro %} +""" + +simple_model = """ +select null as id +""" + +simple_schema = """ +models: + - name: some_model + columns: + - name: id + data_tests: + - not_null +""" diff --git a/tests/functional/retry/test_retry.py b/tests/functional/retry/test_retry.py new file mode 100644 index 000000000..2bdb46beb --- /dev/null +++ b/tests/functional/retry/test_retry.py @@ -0,0 +1,330 @@ +from shutil import copytree, move + +from dbt.contracts.results import RunStatus, TestStatus +from dbt.exceptions import TargetNotFoundError +from dbt.tests.util import rm_file, run_dbt, write_file +from dbt_common.exceptions import DbtRuntimeError +import pytest + +from tests.functional.retry.fixtures import ( + macros__alter_timezone_sql, + models__sample_model, + models__second_model, + models__union_model, + schema_yml, + simple_model, + simple_schema, +) + + +class TestCustomTargetRetry: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + "second_model.sql": models__second_model, + "schema.yml": schema_yml, + } + + def test_custom_target(self, project): + run_dbt(["build", "--select", "second_model"]) + run_dbt( + ["build", "--select", "sample_model", "--target-path", "target2"], expect_pass=False + ) + + # Regular retry - this is a no op because it's actually running `dbt build --select second_model` + # agian because it's looking at the default target since the custom_target wasn't passed in + results = run_dbt(["retry"]) + assert len(results) == 0 + + # Retry with custom target after fixing the error + fixed_sql = "select 1 as id, 1 as foo" + write_file(fixed_sql, "models", "sample_model.sql") + + results = run_dbt(["retry", "--state", "target2"]) + expected_statuses = { + "sample_model": RunStatus.Success, + "accepted_values_sample_model_foo__False__3": TestStatus.Warn, + } + + assert {n.node.name: n.status for n in results.results} == expected_statuses + + write_file(models__sample_model, "models", "sample_model.sql") + + +class TestRetry: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + "second_model.sql": models__second_model, + "union_model.sql": models__union_model, + "schema.yml": schema_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"alter_timezone.sql": macros__alter_timezone_sql} + + def test_no_previous_run(self, project): + with pytest.raises( + DbtRuntimeError, match="Could not find previous run in 'target' target directory" + ): + run_dbt(["retry"]) + + with pytest.raises( + DbtRuntimeError, match="Could not find previous run in 'walmart' target directory" + ): + run_dbt(["retry", "--state", "walmart"]) + + def test_previous_run(self, project): + # Regular build + results = run_dbt(["build"], expect_pass=False) + + expected_statuses = { + "sample_model": RunStatus.Error, + "second_model": RunStatus.Success, + "union_model": RunStatus.Skipped, + "accepted_values_sample_model_foo__False__3": RunStatus.Skipped, + "accepted_values_second_model_bar__False__3": TestStatus.Warn, + "accepted_values_union_model_sum3__False__3": RunStatus.Skipped, + } + + assert {n.node.name: n.status for n in results.results} == expected_statuses + + # Ignore second_model which succeeded + results = run_dbt(["retry"], expect_pass=False) + + expected_statuses = { + "sample_model": RunStatus.Error, + "union_model": RunStatus.Skipped, + "accepted_values_union_model_sum3__False__3": RunStatus.Skipped, + "accepted_values_sample_model_foo__False__3": RunStatus.Skipped, + } + + assert {n.node.name: n.status for n in results.results} == expected_statuses + + # Fix sample model and retry, everything should pass + fixed_sql = "select 1 as id, 1 as foo" + write_file(fixed_sql, "models", "sample_model.sql") + + results = run_dbt(["retry"]) + + expected_statuses = { + "sample_model": RunStatus.Success, + "union_model": RunStatus.Success, + "accepted_values_union_model_sum3__False__3": TestStatus.Pass, + "accepted_values_sample_model_foo__False__3": TestStatus.Warn, + } + + assert {n.node.name: n.status for n in results.results} == expected_statuses + + # No failures in previous run, nothing to retry + results = run_dbt(["retry"]) + expected_statuses = {} + assert {n.node.name: n.status for n in results.results} == expected_statuses + + write_file(models__sample_model, "models", "sample_model.sql") + + def test_warn_error(self, project): + # Our test command should succeed when run normally... + results = run_dbt(["build", "--select", "second_model"]) + + # ...but it should fail when run with warn-error, due to a warning... + results = run_dbt(["--warn-error", "build", "--select", "second_model"], expect_pass=False) + + expected_statuses = { + "second_model": RunStatus.Success, + "accepted_values_second_model_bar__False__3": TestStatus.Fail, + } + + assert {n.node.name: n.status for n in results.results} == expected_statuses + + # Retry regular, should pass + run_dbt(["retry"]) + + # Retry with --warn-error, should fail + run_dbt(["--warn-error", "retry"], expect_pass=False) + + def test_run_operation(self, project): + results = run_dbt( + ["run-operation", "alter_timezone", "--args", "{timezone: abc}"], expect_pass=False + ) + + expected_statuses = { + "macro.test.alter_timezone": RunStatus.Error, + } + + assert {n.unique_id: n.status for n in results.results} == expected_statuses + + results = run_dbt(["retry"], expect_pass=False) + assert {n.unique_id: n.status for n in results.results} == expected_statuses + + def test_removed_file(self, project): + run_dbt(["build"], expect_pass=False) + + rm_file("models", "sample_model.sql") + + with pytest.raises( + TargetNotFoundError, match="depends on a node named 'sample_model' which was not found" + ): + run_dbt(["retry"], expect_pass=False) + + write_file(models__sample_model, "models", "sample_model.sql") + + def test_removed_file_leaf_node(self, project): + write_file(models__sample_model, "models", "third_model.sql") + run_dbt(["build"], expect_pass=False) + + rm_file("models", "third_model.sql") + with pytest.raises(ValueError, match="Couldn't find model 'model.test.third_model'"): + run_dbt(["retry"], expect_pass=False) + + +class TestFailFast: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + "second_model.sql": models__second_model, + "union_model.sql": models__union_model, + "final_model.sql": "select * from {{ ref('union_model') }};", + } + + def test_fail_fast(self, project): + results = run_dbt(["--fail-fast", "build"], expect_pass=False) + assert {r.node.unique_id: r.status for r in results.results} == { + "model.test.sample_model": RunStatus.Error, + "model.test.second_model": RunStatus.Success, + "model.test.union_model": RunStatus.Skipped, + "model.test.final_model": RunStatus.Skipped, + } + + # Check that retry inherits fail-fast from upstream command (build) + results = run_dbt(["retry"], expect_pass=False) + assert {r.node.unique_id: r.status for r in results.results} == { + "model.test.sample_model": RunStatus.Error, + "model.test.union_model": RunStatus.Skipped, + "model.test.final_model": RunStatus.Skipped, + } + + fixed_sql = "select 1 as id, 1 as foo" + write_file(fixed_sql, "models", "sample_model.sql") + + results = run_dbt(["retry"], expect_pass=False) + assert {r.node.unique_id: r.status for r in results.results} == { + "model.test.sample_model": RunStatus.Success, + "model.test.union_model": RunStatus.Success, + "model.test.final_model": RunStatus.Error, + } + + results = run_dbt(["retry"], expect_pass=False) + assert {r.node.unique_id: r.status for r in results.results} == { + "model.test.final_model": RunStatus.Error, + } + + fixed_sql = "select * from {{ ref('union_model') }}" + write_file(fixed_sql, "models", "final_model.sql") + + results = run_dbt(["retry"]) + assert {r.node.unique_id: r.status for r in results.results} == { + "model.test.final_model": RunStatus.Success, + } + + results = run_dbt(["retry"]) + assert {r.node.unique_id: r.status for r in results.results} == {} + + +class TestRetryResourceType: + @pytest.fixture(scope="class") + def models(self): + return { + "null_model.sql": simple_model, + "schema.yml": simple_schema, + } + + def test_resource_type(self, project): + # test multiple options in single string + results = run_dbt(["build", "--select", "null_model", "--resource-type", "test model"]) + assert len(results) == 1 + + # nothing to do + results = run_dbt(["retry"]) + assert len(results) == 0 + + # test multiple options in multiple args + results = run_dbt( + [ + "build", + "--select", + "null_model", + "--resource-type", + "test", + "--resource-type", + "model", + ] + ) + assert len(results) == 1 + + # nothing to do + results = run_dbt(["retry"]) + assert len(results) == 0 + + # test single all option + results = run_dbt(["build", "--select", "null_model", "--resource-type", "all"]) + assert len(results) == 1 + + # nothing to do + results = run_dbt(["retry"]) + assert len(results) == 0 + + +class TestRetryOverridePath: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + } + + def test_retry(self, project): + project_root = project.project_root + proj_location_1 = project_root / "proj_location_1" + proj_location_2 = project_root / "proj_location_2" + + copytree(project_root, proj_location_1) + run_dbt(["run", "--project-dir", "proj_location_1"], expect_pass=False) + move(proj_location_1, proj_location_2) + run_dbt(["retry", "--project-dir", "proj_location_2"], expect_pass=False) + + +class TestRetryVars: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": "select {{ var('myvar_a', '1') + var('myvar_b', '2') }} as mycol", + } + + def test_retry(self, project): + # pass because default vars works + run_dbt(["run"]) + run_dbt(["run", "--vars", '{"myvar_a": "12", "myvar_b": "3 4"}'], expect_pass=False) + # fail because vars are invalid, this shows that the last passed vars are being used + # instead of using the default vars + run_dbt(["retry"], expect_pass=False) + results = run_dbt(["retry", "--vars", '{"myvar_a": "12", "myvar_b": "34"}']) + assert len(results) == 1 + + +class TestRetryFullRefresh: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": "{% if flags.FULL_REFRESH %} this is invalid sql {% else %} select 1 as mycol {% endif %}", + } + + def test_retry(self, project): + # This run should fail with invalid sql... + run_dbt(["run", "--full-refresh"], expect_pass=False) + # ...and so should this one, since the effect of the full-refresh parameter should persist. + results = run_dbt(["retry"], expect_pass=False) + assert len(results) == 1 diff --git a/tests/functional/run_operations/fixtures.py b/tests/functional/run_operations/fixtures.py new file mode 100644 index 000000000..f6ed82e20 --- /dev/null +++ b/tests/functional/run_operations/fixtures.py @@ -0,0 +1,72 @@ +happy_macros_sql = """ +{% macro no_args() %} + {% if execute %} + {% call statement(auto_begin=True) %} + create table "{{ schema }}"."no_args" (id int); + commit; + {% endcall %} + {% endif %} +{% endmacro %} + + +{% macro table_name_args(table_name) %} + {% if execute %} + {% call statement(auto_begin=True) %} + create table "{{ schema }}"."{{ table_name }}" (id int); + commit; + {% endcall %} + {% endif %} +{% endmacro %} + +{% macro select_something(name) %} + {% set query %} + select 'hello, {{ name }}' as name + {% endset %} + {% set table = run_query(query) %} + + {% if table.columns['name'][0] != 'hello, world' %} + {% do exceptions.raise_compiler_error("unexpected result: " ~ table) %} + {% endif %} +{% endmacro %} + +{% macro vacuum(table_name) %} + {% set query %} + vacuum "{{ schema }}"."{{ table_name }}" + {% endset %} + {% do run_query(query) %} +{% endmacro %} + + +{% macro vacuum_ref(ref_target) %} + {% set query %} + vacuum {{ ref(ref_target) }} + {% endset %} + {% do run_query(query) %} +{% endmacro %} + + +{% macro log_graph() %} + {% for node in graph.nodes.values() %} + {{ log((node | string), info=True)}} + {% endfor %} +{% endmacro %} + + +{% macro print_something() %} + {{ print("You're doing awesome!") }} +{% endmacro %} +""" + +sad_macros_sql = """ +{% macro syntax_error() %} + {% if execute %} + {% call statement() %} + select NOPE NOT A VALID QUERY + {% endcall %} + {% endif %} +{% endmacro %} +""" + +model_sql = """ +select 1 as id +""" diff --git a/tests/functional/run_operations/test_run_operations.py b/tests/functional/run_operations/test_run_operations.py new file mode 100644 index 000000000..ea077dcf2 --- /dev/null +++ b/tests/functional/run_operations/test_run_operations.py @@ -0,0 +1,144 @@ +import os + +from dbt.tests.util import ( + check_table_does_exist, + mkdir, + rm_dir, + rm_file, + run_dbt, + run_dbt_and_capture, + write_file, +) +from dbt_common.exceptions import DbtInternalError +import pytest +import yaml + +from tests.functional.run_operations.fixtures import ( + happy_macros_sql, + model_sql, + sad_macros_sql, +) + + +class TestOperations: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + @pytest.fixture(scope="class") + def macros(self): + return {"happy_macros.sql": happy_macros_sql, "sad_macros.sql": sad_macros_sql} + + @pytest.fixture(scope="class") + def dbt_profile_data(self, unique_schema): + return { + "test": { + "outputs": { + "default": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": os.getenv("POSTGRES_TEST_USER", "root"), + "pass": os.getenv("POSTGRES_TEST_PASS", "password"), + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema, + }, + "noaccess": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": "noaccess", + "pass": "password", + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema, + }, + }, + "target": "default", + }, + } + + def run_operation(self, macro, expect_pass=True, extra_args=None, **kwargs): + args = ["run-operation", macro] + if kwargs: + args.extend(("--args", yaml.safe_dump(kwargs))) + if extra_args: + args.extend(extra_args) + return run_dbt(args, expect_pass=expect_pass) + + def test_macro_noargs(self, project): + self.run_operation("no_args") + check_table_does_exist(project.adapter, "no_args") + + def test_macro_args(self, project): + self.run_operation("table_name_args", table_name="my_fancy_table") + check_table_does_exist(project.adapter, "my_fancy_table") + + def test_macro_exception(self, project): + self.run_operation("syntax_error", False) + + def test_macro_missing(self, project): + with pytest.raises( + DbtInternalError, + match="dbt could not find a macro with the name 'this_macro_does_not_exist' in any package", + ): + self.run_operation("this_macro_does_not_exist", False) + + def test_cannot_connect(self, project): + self.run_operation("no_args", extra_args=["--target", "noaccess"], expect_pass=False) + + def test_vacuum(self, project): + run_dbt(["run"]) + # this should succeed + self.run_operation("vacuum", table_name="model") + + def test_vacuum_ref(self, project): + run_dbt(["run"]) + # this should succeed + self.run_operation("vacuum_ref", ref_target="model") + + def test_select(self, project): + self.run_operation("select_something", name="world") + + def test_access_graph(self, project): + self.run_operation("log_graph") + + def test_print(self, project): + # Tests that calling the `print()` macro does not cause an exception + self.run_operation("print_something") + + def test_run_operation_local_macro(self, project): + pkg_macro = """ +{% macro something_cool() %} + {{ log("something cool", info=true) }} +{% endmacro %} + """ + + mkdir("pkg/macros") + + write_file(pkg_macro, "pkg/macros/something_cool.sql") + + pkg_yaml = """ +packages: + - local: pkg + """ + + write_file(pkg_yaml, "packages.yml") + + pkg_dbt_project = """ +name: 'pkg' + """ + + write_file(pkg_dbt_project, "pkg/dbt_project.yml") + + run_dbt(["deps"]) + + results, log_output = run_dbt_and_capture(["run-operation", "something_cool"]) + assert "something cool" in log_output + + results, log_output = run_dbt_and_capture(["run-operation", "pkg.something_cool"]) + assert "something cool" in log_output + + rm_dir("pkg") + rm_file("packages.yml") diff --git a/tests/functional/saved_queries/fixtures.py b/tests/functional/saved_queries/fixtures.py new file mode 100644 index 000000000..68565d82e --- /dev/null +++ b/tests/functional/saved_queries/fixtures.py @@ -0,0 +1,93 @@ +saved_query_description = """ +{% docs saved_query_description %} My SavedQuery Description {% enddocs %} +""" + +saved_queries_yml = """ +version: 2 + +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name +""" + +saved_query_with_extra_config_attributes_yml = """ +version: 2 + +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + my_random_config: 'I have this for some reason' + export_as: table +""" + +saved_query_with_export_configs_defined_at_saved_query_level_yml = """ +version: 2 + +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + config: + export_as: table + schema: my_default_export_schema + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + export_as: view + schema: my_custom_export_schema + - name: my_export2 +""" + +saved_query_without_export_configs_defined_yml = """ +version: 2 + +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export +""" diff --git a/tests/functional/saved_queries/test_configs.py b/tests/functional/saved_queries/test_configs.py new file mode 100644 index 000000000..685c54c2b --- /dev/null +++ b/tests/functional/saved_queries/test_configs.py @@ -0,0 +1,186 @@ +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import update_config_file +from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType +import pytest + +from tests.functional.configs.fixtures import BaseConfigProject +from tests.functional.dbt_runner import dbtTestRunner +from tests.functional.saved_queries.fixtures import ( + saved_queries_yml, + saved_query_description, + saved_query_with_export_configs_defined_at_saved_query_level_yml, + saved_query_with_extra_config_attributes_yml, + saved_query_without_export_configs_defined_yml, +) +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_yml, +) + + +class TestSavedQueryConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "saved-queries": { + "test": { + "test_saved_query": { + "+enabled": True, + "+export_as": ExportDestinationType.VIEW.value, + "+schema": "my_default_export_schema", + } + }, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_with_extra_config_attributes_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_basic_saved_query_config( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.export_as == ExportDestinationType.VIEW + assert saved_query.config.schema == "my_default_export_schema" + + # disable the saved_query via project config and rerun + config_patch = {"saved-queries": {"test": {"test_saved_query": {"+enabled": False}}}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.success + assert len(result.result.saved_queries) == 0 + + +class TestExportConfigsWithAdditionalProperties(BaseConfigProject): + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_extra_config_properties_dont_break_parsing(self, project): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert len(saved_query.exports) == 1 + assert saved_query.exports[0].config.__dict__.get("my_random_config") is None + + +class TestInheritingExportConfigFromSavedQueryConfig(BaseConfigProject): + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_with_export_configs_defined_at_saved_query_level_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_export_config_inherits_from_saved_query(self, project): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert len(saved_query.exports) == 2 + + # assert Export `my_export` has its configs defined from itself because they should take priority + export1 = next( + (export for export in saved_query.exports if export.name == "my_export"), None + ) + assert export1 is not None + assert export1.config.export_as == ExportDestinationType.VIEW + assert export1.config.export_as != saved_query.config.export_as + assert export1.config.schema_name == "my_custom_export_schema" + assert export1.config.schema_name != saved_query.config.schema + + # assert Export `my_export` has its configs defined from the saved_query because they should take priority + export2 = next( + (export for export in saved_query.exports if export.name == "my_export2"), None + ) + assert export2 is not None + assert export2.config.export_as == ExportDestinationType.TABLE + assert export2.config.export_as == saved_query.config.export_as + assert export2.config.schema_name == "my_default_export_schema" + assert export2.config.schema_name == saved_query.config.schema + + +class TestInheritingExportConfigsFromProject(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "saved-queries": { + "test": { + "test_saved_query": { + "+export_as": ExportDestinationType.VIEW.value, + } + }, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_without_export_configs_defined_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_export_config_inherits_from_project( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.export_as == ExportDestinationType.VIEW + + # change export's `export_as` to `TABLE` via project config + config_patch = { + "saved-queries": { + "test": {"test_saved_query": {"+export_as": ExportDestinationType.TABLE.value}} + } + } + update_config_file(config_patch, project.project_root, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.export_as == ExportDestinationType.TABLE diff --git a/tests/functional/saved_queries/test_saved_query_build.py b/tests/functional/saved_queries/test_saved_query_build.py new file mode 100644 index 000000000..19787e71a --- /dev/null +++ b/tests/functional/saved_queries/test_saved_query_build.py @@ -0,0 +1,41 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.saved_queries.fixtures import ( + saved_queries_yml, + saved_query_description, +) +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_yml, +) + + +class TestSavedQueryBuildNoOp: + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + @pytest.fixture(scope="class") + def packages(self): + return """ +packages: + - package: dbt-labs/dbt_utils + version: 1.1.1 +""" + + def test_semantic_model_parsing(self, project): + run_dbt(["deps"]) + result = run_dbt(["build"]) + assert len(result.results) == 2 + assert "test_saved_query" not in [r.node.name for r in result.results] + result = run_dbt(["build", "--include-saved-query"]) + assert len(result.results) == 3 + assert "test_saved_query" in [r.node.name for r in result.results] diff --git a/tests/functional/saved_queries/test_saved_query_parsing.py b/tests/functional/saved_queries/test_saved_query_parsing.py new file mode 100644 index 000000000..73f63f1ea --- /dev/null +++ b/tests/functional/saved_queries/test_saved_query_parsing.py @@ -0,0 +1,113 @@ +from typing import List + +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import write_file +from dbt_common.events.base_types import BaseEvent +from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType +import pytest + +from tests.functional.dbt_runner import dbtTestRunner +from tests.functional.saved_queries.fixtures import ( + saved_queries_yml, + saved_query_description, +) +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_yml, +) + + +class TestSavedQueryParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_semantic_model_parsing(self, project): + runner = dbtTestRunner() + result = runner.invoke(["parse", "--no-partial-parse"]) + assert result.success + assert isinstance(result.result, Manifest) + manifest = result.result + assert len(manifest.saved_queries) == 1 + saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.name == "test_saved_query" + assert len(saved_query.query_params.metrics) == 1 + assert len(saved_query.query_params.group_by) == 1 + assert len(saved_query.query_params.where.where_filters) == 2 + assert len(saved_query.depends_on.nodes) == 1 + assert saved_query.description == "My SavedQuery Description" + assert len(saved_query.exports) == 1 + assert saved_query.exports[0].name == "my_export" + assert saved_query.exports[0].config.alias == "my_export_alias" + assert saved_query.exports[0].config.export_as == ExportDestinationType.TABLE + assert saved_query.exports[0].config.schema_name == "my_export_schema_name" + + def test_saved_query_error(self, project): + error_schema_yml = saved_queries_yml.replace("simple_metric", "metric_not_found") + write_file(error_schema_yml, project.project_root, "models", "saved_queries.yml") + events: List[BaseEvent] = [] + runner = dbtTestRunner(callbacks=[events.append]) + + result = runner.invoke(["parse", "--no-partial-parse"]) + assert not result.success + validation_errors = [e for e in events if e.info.name == "MainEncounteredError"] + assert validation_errors + + +class TestSavedQueryPartialParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_saved_query_metrics_changed(self, project): + # First, use the default saved_queries.yml to define our saved_queries, and + # run the dbt parse command + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + + # Next, modify the default saved_queries.yml to change a detail of the saved + # query. + modified_saved_queries_yml = saved_queries_yml.replace("simple_metric", "txn_revenue") + write_file(modified_saved_queries_yml, project.project_root, "models", "saved_queries.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Finally, verify that the manifest reflects the partially parsed change + manifest = result.result + saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] + assert len(saved_query.metrics) == 1 + assert saved_query.metrics[0] == "txn_revenue" + + def test_saved_query_deleted_partial_parsing(self, project): + # First, use the default saved_queries.yml to define our saved_query, and + # run the dbt parse command + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + assert "saved_query.test.test_saved_query" in result.result.saved_queries + + # Next, modify the default saved_queries.yml to remove the saved query. + write_file("", project.project_root, "models", "saved_queries.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Finally, verify that the manifest reflects the deletion + assert "saved_query.test.test_saved_query" not in result.result.saved_queries diff --git a/tests/functional/schema/fixtures/macros.py b/tests/functional/schema/fixtures/macros.py new file mode 100644 index 000000000..2ac49c9f6 --- /dev/null +++ b/tests/functional/schema/fixtures/macros.py @@ -0,0 +1,37 @@ +_CUSTOM_MACRO = """ +{% macro generate_schema_name(schema_name, node) %} + + {{ schema_name }}_{{ target.schema }}_macro + +{% endmacro %} +""" + +_CUSTOM_MACRO_W_CONFIG = """ +{% macro generate_schema_name(schema_name, node) %} + + {{ node.config['schema'] }}_{{ target.schema }}_macro + +{% endmacro %} +""" + +_CUSTOM_MACRO_MULTI_SCHEMA = """ +{% macro generate_alias_name(custom_alias_name=none, node=none) -%} + {%- set node_name = node.name | trim -%} + {%- set split_name = node_name.split('.') -%} + {%- set n_parts = split_name | length -%} + + {{ split_name[1] if n_parts>1 else node_name }} + +{%- endmacro -%} + + +{% macro generate_schema_name(custom_schema_name=none, node=none) -%} + {%- set default_schema = target.schema -%} + {%- set node_name = node.name | trim -%} + {%- set split_name = node_name.split('.') -%} + {%- set n_parts = split_name | length -%} + + {{ split_name[0] if n_parts>1 else default_schema }} + +{%- endmacro -%} +""" diff --git a/tests/functional/schema/fixtures/sql.py b/tests/functional/schema/fixtures/sql.py new file mode 100644 index 000000000..ca9fe2a0c --- /dev/null +++ b/tests/functional/schema/fixtures/sql.py @@ -0,0 +1,128 @@ +_TABLE_ONE = """ +select * from {{ ref('seed') }} +""" +_TABLE_ONE_DOT_MODEL_SCHEMA = "first_schema" +_TABLE_ONE_DOT_MODEL_NAME = f"{_TABLE_ONE_DOT_MODEL_SCHEMA}.view_1" +_TABLE_ONE_DOT_MODEL = """ +select * from {{ target.schema }}.seed +""" + +_TABLE_TWO_SCHEMA = "custom" +_TABLE_TWO = ( + """ +{{ config(schema='""" + + _TABLE_TWO_SCHEMA + + """') }} +select * from {{ ref('view_1') }} +""" +) +_TABLE_TWO_DOT_MODEL_SCHEMA = "second_schema" +_TABLE_TWO_DOT_MODEL_NAME = f"{_TABLE_TWO_DOT_MODEL_SCHEMA}.view_2" +_TABLE_TWO_DOT_MODEL = "select * from {{ ref('" + _TABLE_ONE_DOT_MODEL_NAME + "') }}" + +_TABLE_THREE_SCHEMA = "test" +_TABLE_THREE = ( + """ +{{ config(materialized='table', schema='""" + + _TABLE_THREE_SCHEMA + + """') }} + + +with v1 as ( + + select * from{{ ref('view_1') }} + +), + +v2 as ( + + select * from {{ ref('view_2') }} + +), + +combined as ( + + select last_name from v1 + union all + select last_name from v2 + +) + +select + last_name, + count(*) as count + +from combined +group by 1 +""" +) + +_TABLE_THREE_DOT_MODEL = """ +{{ config(materialized='table') }} + + +with v1 as ( + + select * from {{ ref('first_schema.view_1') }} + +), + +v2 as ( + + select * from {{ ref('second_schema.view_2') }} + +), + +combined as ( + + select last_name from v1 + union all + select last_name from v2 + +) + +select + last_name, + count(*) as count + +from combined +group by 1 +""" + +_SEED_CSV = """id,first_name,last_name,email,gender,ip_address +1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 +2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 +3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243""" + +_CUSTOM_CONFIG = """ +{{ config(schema='custom') }} + +select * from {{ ref('view_1') }} +""" + +_VALIDATION_SQL = """ +drop table if exists {database}.{schema}.seed cascade; +create table {database}.{schema}.seed ( + id BIGSERIAL PRIMARY KEY, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20) +); + +drop table if exists {database}.{schema}.agg cascade; +create table {database}.{schema}.agg ( + last_name VARCHAR(50), + count BIGINT +); + + +insert into {database}.{schema}.seed (first_name, last_name, email, gender, ip_address) values +('Jack', 'Hunter', 'jhunter0@pbs.org', 'Male', '59.80.20.168'), +('Kathryn', 'Walker', 'kwalker1@ezinearticles.com', 'Female', '194.121.179.35'), +('Gerald', 'Ryan', 'gryan2@com.com', 'Male', '11.3.212.243'); + +insert into {database}.{schema}.agg (last_name, count) values +('Hunter', 2), ('Walker', 2), ('Ryan', 2); +""" diff --git a/tests/functional/schema/test_custom_schema.py b/tests/functional/schema/test_custom_schema.py new file mode 100644 index 000000000..7679446d0 --- /dev/null +++ b/tests/functional/schema/test_custom_schema.py @@ -0,0 +1,220 @@ +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + +from tests.functional.schema.fixtures.macros import ( + _CUSTOM_MACRO, + _CUSTOM_MACRO_MULTI_SCHEMA, + _CUSTOM_MACRO_W_CONFIG, +) +from tests.functional.schema.fixtures.sql import ( + _SEED_CSV, + _TABLE_ONE, + _TABLE_ONE_DOT_MODEL_NAME, + _TABLE_ONE_DOT_MODEL_SCHEMA, + _TABLE_THREE, + _TABLE_THREE_DOT_MODEL, + _TABLE_THREE_SCHEMA, + _TABLE_TWO, + _TABLE_TWO_DOT_MODEL, + _TABLE_TWO_DOT_MODEL_NAME, + _TABLE_TWO_DOT_MODEL_SCHEMA, + _TABLE_TWO_SCHEMA, + _VALIDATION_SQL, +) + + +_CUSTOM_SCHEMA = "dbt_test" + + +class BaseTestCustomSchema: + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": _SEED_CSV} + + @pytest.fixture(scope="class") + def models(self): + return { + "view_1.sql": _TABLE_ONE, + "view_2.sql": _TABLE_TWO, + "table_3.sql": _TABLE_THREE, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"models": {"schema": _CUSTOM_SCHEMA}} + + +class TestCustomSchema(BaseTestCustomSchema): + def test__postgres_handles__custom_schema_with_no_prefix(self, project, macros): + project.run_sql(_VALIDATION_SQL) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 3 + table_results = {r.node.name: r.node.schema for r in results.results} + assert table_results["view_1"] == f"{project.test_schema}_{_CUSTOM_SCHEMA}" + assert table_results["view_2"] == f"{project.test_schema}_{_TABLE_TWO_SCHEMA}" + assert table_results["table_3"] == f"{project.test_schema}_{_TABLE_THREE_SCHEMA}" + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{project.test_schema}_{_CUSTOM_SCHEMA}.view_1"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{project.test_schema}_{_TABLE_TWO_SCHEMA}.view_2"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("agg", f"{project.test_schema}_{_TABLE_THREE_SCHEMA}.table_3"), + ) + + +class TestCustomSchemaWithCustomMacro(BaseTestCustomSchema): + @pytest.fixture(scope="class") + def macros(self): + return { + "custom_macro.sql": _CUSTOM_MACRO, + } + + def test__postgres_handles__custom_schema_with_custom_macro(self, project, macros): + project.run_sql(_VALIDATION_SQL) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 3 + table_results = {r.node.name: r.node.schema for r in results.results} + assert table_results["view_1"] == f"{_CUSTOM_SCHEMA}_{project.test_schema}_macro" + assert table_results["view_2"] == f"{_TABLE_TWO_SCHEMA}_{project.test_schema}_macro" + assert table_results["table_3"] == f"{_TABLE_THREE_SCHEMA}_{project.test_schema}_macro" + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{_CUSTOM_SCHEMA}_{project.test_schema}_macro.view_1"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{_TABLE_TWO_SCHEMA}_{project.test_schema}_macro.view_2"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("agg", f"{_TABLE_THREE_SCHEMA}_{project.test_schema}_macro.table_3"), + ) + + +class TestCustomSchemaWithPrefix(BaseTestCustomSchema): + @pytest.fixture(scope="class") + def macros(self): + return { + "custom_macro.sql": _CUSTOM_MACRO_W_CONFIG, + } + + def test__postgres__custom_schema_with_prefix(self, project, macros): + project.run_sql(_VALIDATION_SQL) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 3 + table_results = {r.node.name: r.node.schema for r in results.results} + assert table_results["view_1"] == f"{_CUSTOM_SCHEMA}_{project.test_schema}_macro" + assert table_results["view_2"] == f"{_TABLE_TWO_SCHEMA}_{project.test_schema}_macro" + assert table_results["table_3"] == f"{_TABLE_THREE_SCHEMA}_{project.test_schema}_macro" + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{_CUSTOM_SCHEMA}_{project.test_schema}_macro.view_1"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{_TABLE_TWO_SCHEMA}_{project.test_schema}_macro.view_2"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("agg", f"{_TABLE_THREE_SCHEMA}_{project.test_schema}_macro.table_3"), + ) + + +class TestCustomSchemaWithPrefixAndDispatch(BaseTestCustomSchema): + @pytest.fixture(scope="class") + def macros(self): + return { + "custom_macro.sql": _CUSTOM_MACRO_W_CONFIG, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": {"schema": _CUSTOM_SCHEMA}, + "dispatch": [ + { + "macro_namespace": "dbt", + "search_order": ["test", "package_macro_overrides", "dbt"], + } + ], + } + + def test__postgres__custom_schema_with_prefix_and_dispatch( + self, project, macros, project_config_update + ): + project.run_sql(_VALIDATION_SQL) + run_dbt(["deps"]) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 3 + table_results = {r.node.name: r.node.schema for r in results.results} + assert table_results["view_1"] == f"{_CUSTOM_SCHEMA}_{project.test_schema}_macro" + assert table_results["view_2"] == f"{_TABLE_TWO_SCHEMA}_{project.test_schema}_macro" + assert table_results["table_3"] == f"{_TABLE_THREE_SCHEMA}_{project.test_schema}_macro" + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{_CUSTOM_SCHEMA}_{project.test_schema}_macro.view_1"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("seed", f"{_TABLE_TWO_SCHEMA}_{project.test_schema}_macro.view_2"), + ) + check_relations_equal( + adapter=project.adapter, + relation_names=("agg", f"{_TABLE_THREE_SCHEMA}_{project.test_schema}_macro.table_3"), + ) + + +class TestCustomSchemaWithCustomMacroFromModelName(BaseTestCustomSchema): + @pytest.fixture(scope="class") + def macros(self): + return { + "custom_macro.sql": _CUSTOM_MACRO_MULTI_SCHEMA, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": {"schema": _CUSTOM_SCHEMA}, + "seeds": { + "quote_columns": False, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + f"{_TABLE_ONE_DOT_MODEL_NAME}.sql": _TABLE_ONE, + f"{_TABLE_TWO_DOT_MODEL_NAME}.sql": _TABLE_TWO_DOT_MODEL, + "table_3.sql": _TABLE_THREE_DOT_MODEL, + } + + def test__postgres__custom_schema_from_model_name( + self, project, macros, project_config_update + ): + project.run_sql(_VALIDATION_SQL) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 3 + table_results = {r.node.name: r.node.schema for r in results.results} + + assert table_results[_TABLE_ONE_DOT_MODEL_NAME] == _TABLE_ONE_DOT_MODEL_SCHEMA + assert table_results[_TABLE_TWO_DOT_MODEL_NAME] == _TABLE_TWO_DOT_MODEL_SCHEMA + assert table_results["table_3"] == f"{project.test_schema}" + check_relations_equal( + adapter=project.adapter, relation_names=("seed", _TABLE_ONE_DOT_MODEL_NAME) + ) + check_relations_equal( + adapter=project.adapter, relation_names=("seed", _TABLE_TWO_DOT_MODEL_NAME) + ) + check_relations_equal( + adapter=project.adapter, relation_names=("agg", f"{project.test_schema}.table_3") + ) diff --git a/tests/functional/schema_tests/data/seed.sql b/tests/functional/schema_tests/data/seed.sql new file mode 100644 index 000000000..8f1801504 --- /dev/null +++ b/tests/functional/schema_tests/data/seed.sql @@ -0,0 +1,117 @@ +create table {schema}.seed ( + favorite_color VARCHAR(10), + id INTEGER, + first_name VARCHAR(11), + email VARCHAR(31), + + net_worth NUMERIC(12, 2) DEFAULT '100.00', + fav_number NUMERIC DEFAULT '3.14159265', + + ip_address VARCHAR(15), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +INSERT INTO {schema}.seed + ("favorite_color", "id","first_name","email","ip_address","updated_at") +VALUES + ('blue', 1,'Larry',null,'69.135.206.194','2008-09-12 19:08:31'), + ('blue', 2,'Larry',null,'64.210.133.162','1978-05-09 04:15:14'), + ('blue', 3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), + ('blue', 4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), + ('blue', 5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), + ('blue', 6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), + ('blue', 7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), + ('blue', 8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), + ('blue', 9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), + ('blue', 10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), + ('blue', 11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), + ('blue', 12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), + ('blue', 13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), + ('blue', 14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), + ('blue', 15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), + ('blue', 16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), + ('blue', 17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), + ('blue', 18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), + ('blue', 19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), + ('blue', 20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), + ('blue', 21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), + ('blue', 22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), + ('blue', 23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), + ('blue', 24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), + ('blue', 25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), + ('blue', 26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), + ('blue', 27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), + ('blue', 28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), + ('blue', 29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), + ('blue', 30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), + ('blue', 31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), + ('blue', 32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), + ('blue', 33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), + ('blue', 34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), + ('blue', 35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), + ('blue', 36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), + ('blue', 37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), + ('blue', 38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), + ('blue', 39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), + ('blue', 40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), + ('blue', 41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), + ('blue', 42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), + ('blue', 43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), + ('blue', 44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), + ('blue', 45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), + ('blue', 46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), + ('blue', 47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), + ('blue', 48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), + ('blue', 49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), + ('blue', 50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), + ('green', 51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), + ('green', 52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), + ('green', 53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), + ('green', 54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), + ('green', 55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), + ('green', 56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), + ('green', 57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), + ('green', 58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), + ('green', 59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), + ('green', 60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), + ('green', 61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), + ('green', 62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), + ('green', 63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), + ('green', 64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), + ('green', 65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), + ('green', 66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), + ('green', 67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), + ('green', 68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), + ('green', 69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), + ('green', 70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), + ('green', 71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), + ('green', 72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), + ('green', 73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), + ('green', 74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), + ('green', 75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), + ('green', 76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), + ('green', 77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), + ('green', 78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), + ('green', 79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), + ('green', 80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), + ('green', 81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), + ('green', 82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), + ('green', 83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), + ('green', 84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), + ('green', 85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), + ('green', 86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), + ('green', 87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), + ('green', 88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), + ('green', 89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), + ('green', 90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), + ('green', 91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), + ('green', 92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), + ('green', 93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), + ('green', 94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), + ('green', 95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), + ('green', 96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), + ('green', 97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), + ('green', 98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), + ('green', 99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), + ('green', 100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'); diff --git a/tests/functional/schema_tests/data/seed_failure.sql b/tests/functional/schema_tests/data/seed_failure.sql new file mode 100644 index 000000000..f68c4591a --- /dev/null +++ b/tests/functional/schema_tests/data/seed_failure.sql @@ -0,0 +1,116 @@ +create table {schema}.seed_failure ( + favorite_color VARCHAR(10), + id INTEGER, + first_name VARCHAR(11), + email VARCHAR(31), + ip_address VARCHAR(15), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +INSERT INTO {schema}.seed_failure + ("favorite_color", "id","first_name","email","ip_address","updated_at") +VALUES + -- unaccepted 'red' favorite_color + ('red', 1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), + -- dupicate unique field (id=1) + ('blue', 1,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), + -- null not_null field (id) + ('blue', null,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), + ('blue', 4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), + ('blue', 5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), + ('blue', 6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), + ('blue', 7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), + ('blue', 8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), + ('blue', 9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), + ('blue', 10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), + ('blue', 11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), + ('blue', 12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), + ('blue', 13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), + ('blue', 14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), + ('blue', 15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), + ('blue', 16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), + ('blue', 17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), + ('blue', 18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), + ('blue', 19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), + ('blue', 20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), + ('blue', 21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), + ('blue', 22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), + ('blue', 23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), + ('blue', 24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), + ('blue', 25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), + ('blue', 26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), + ('blue', 27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), + ('blue', 28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), + ('blue', 29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), + ('blue', 30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), + ('blue', 31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), + ('blue', 32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), + ('blue', 33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), + ('blue', 34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), + ('blue', 35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), + ('blue', 36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), + ('blue', 37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), + ('blue', 38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), + ('blue', 39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), + ('blue', 40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), + ('blue', 41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), + ('blue', 42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), + ('blue', 43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), + ('blue', 44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), + ('blue', 45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), + ('blue', 46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), + ('blue', 47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), + ('blue', 48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), + ('blue', 49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), + ('blue', 50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), + ('green', 51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), + ('green', 52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), + ('green', 53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), + ('green', 54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), + ('green', 55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), + ('green', 56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), + ('green', 57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), + ('green', 58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), + ('green', 59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), + ('green', 60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), + ('green', 61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), + ('green', 62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), + ('green', 63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), + ('green', 64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), + ('green', 65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), + ('green', 66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), + ('green', 67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), + ('green', 68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), + ('green', 69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), + ('green', 70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), + ('green', 71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), + ('green', 72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), + ('green', 73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), + ('green', 74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), + ('green', 75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), + ('green', 76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), + ('green', 77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), + ('green', 78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), + ('green', 79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), + ('green', 80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), + ('green', 81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), + ('green', 82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), + ('green', 83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), + ('green', 84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), + ('green', 85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), + ('green', 86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), + ('green', 87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), + ('green', 88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), + ('green', 89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), + ('green', 90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), + ('green', 91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), + ('green', 92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), + ('green', 93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), + ('green', 94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), + ('green', 95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), + ('green', 96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), + ('green', 97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), + ('green', 98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), + ('green', 99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), + ('green', 100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'); diff --git a/tests/functional/schema_tests/fixtures.py b/tests/functional/schema_tests/fixtures.py new file mode 100644 index 000000000..51ae067bd --- /dev/null +++ b/tests/functional/schema_tests/fixtures.py @@ -0,0 +1,1275 @@ +wrong_specification_block__schema_yml = """ +version: 2 +models: + - name: some_seed + description: "This is my seed under a model" +""" + +test_context_where_subq_models__schema_yml = """ +version: 2 + +models: + - name: model_a + data_tests: + - self_referential + +""" + +test_context_where_subq_models__model_a_sql = """ +select 1 as fun + +""" + +test_utils__dbt_project_yml = """ +name: 'test_utils' +version: '1.0' +config-version: 2 + +profile: 'default' + +macro-paths: ["macros"] + + +""" + +test_utils__macros__current_timestamp_sql = """ +{% macro current_timestamp() -%} + {{ return(adapter.dispatch('current_timestamp', 'test_utils')()) }} +{%- endmacro %} + +{% macro default__current_timestamp() -%} + now() +{%- endmacro %} + +""" + +test_utils__macros__custom_test_sql = """ +{% macro test_dispatch(model) -%} + {{ return(adapter.dispatch('test_dispatch', macro_namespace = 'test_utils')()) }} +{%- endmacro %} + +{% macro default__test_dispatch(model) %} + select {{ adapter.dispatch('current_timestamp', macro_namespace = 'test_utils')() }} +{% endmacro %} + +""" + +local_dependency__dbt_project_yml = """ +name: 'local_dep' +version: '1.0' +config-version: 2 + +profile: 'default' + +macro-paths: ["macros"] + +""" + +local_dependency__macros__equality_sql = """ +{#-- taken from dbt-utils --#} +{% test equality(model, compare_model, compare_columns=None) %} + {{ return(adapter.dispatch('test_equality')(model, compare_model, compare_columns)) }} +{% endtest %} + +{% macro default__test_equality(model, compare_model, compare_columns=None) %} + +{% set set_diff %} + count(*) + abs( + sum(case when which_diff = 'a_minus_b' then 1 else 0 end) - + sum(case when which_diff = 'b_minus_a' then 1 else 0 end) + ) +{% endset %} + +{#-- Needs to be set at parse time, before we return '' below --#} +{{ config(fail_calc = set_diff) }} + +{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #} +{%- if not execute -%} + {{ return('') }} +{% endif %} +-- setup +{%- do dbt_utils._is_relation(model, 'test_equality') -%} +{#- +If the compare_cols arg is provided, we can run this test without querying the +information schema — this allows the model to be an ephemeral model +-#} + +{%- if not compare_columns -%} + {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%} + {%- set compare_columns = adapter.get_columns_in_relation(model) | map(attribute='quoted') -%} +{%- endif -%} + +{% set compare_cols_csv = compare_columns | join(', ') %} + +with a as ( + select * from {{ model }} +), +b as ( + select * from {{ compare_model }} +), +a_minus_b as ( + select {{compare_cols_csv}} from a + {{ dbt_utils.except() }} + select {{compare_cols_csv}} from b +), +b_minus_a as ( + select {{compare_cols_csv}} from b + {{ dbt_utils.except() }} + select {{compare_cols_csv}} from a +), + +unioned as ( + + select 'a_minus_b' as which_diff, * from a_minus_b + union all + select 'b_minus_a' as which_diff, * from b_minus_a + +) + +select * from unioned + +{% endmacro %} + +""" + +case_sensitive_models__schema_yml = """ +version: 2 + +models: + - name: lowercase + columns: + - name: id + quote: true + data_tests: + - unique + - name: uppercase + columns: + - name: id + quote: true + data_tests: + - unique + +""" + +case_sensitive_models__uppercase_SQL = """ +select 1 as id + +""" + +case_sensitive_models__lowercase_sql = """ +select 1 as id + +""" + +test_context_macros__my_test_sql = """ +{% macro test_call_pkg_macro(model) %} + select {{ adapter.dispatch('current_timestamp', macro_namespace = 'local_utils')() }} +{% endmacro %} + +""" + +test_context_macros__test_my_datediff_sql = """ +{% macro test_my_datediff(model) %} + select {{ local_utils.datediff() }} +{% endmacro %} + +""" + +test_context_macros__custom_schema_tests_sql = """ +{% test type_one(model) %} + + select * from ( + + select * from {{ model }} + union all + select * from {{ ref('model_b') }} + + ) as Foo + +{% endtest %} + +{% test type_two(model) %} + + {{ config(severity = "WARN") }} + + select * from {{ model }} + +{% endtest %} + +""" + +test_context_models_namespaced__schema_yml = """ + +version: 2 + +models: + - name: model_a + data_tests: + - type_one + - type_two + - name: model_c + data_tests: + - call_pkg_macro + - test_utils.dispatch + +""" + +test_context_models_namespaced__model_c_sql = """ +select 1 as fun + +""" + +test_context_models_namespaced__model_b_sql = """ +select 1 as notfun + +""" + +test_context_models_namespaced__model_a_sql = """ +select 1 as fun + +""" + +macros_v2__override_get_test_macros_fail__get_test_sql_sql = """ +{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} + select + {{ fail_calc }} as failures, + case when {{ fail_calc }} {{ warn_if }} then 'x' else 'y' end as should_warn, + case when {{ fail_calc }} {{ error_if }} then 'x' else 'y' end as should_error + from ( + {{ main_sql }} + {{ "limit " ~ limit if limit != none }} + ) dbt_internal_test +{% endmacro %} +""" + +macros_v2__macros__tests_sql = """ +{% test every_value_is_blue(model, column_name) %} + + select * + from {{ model }} + where {{ column_name }} != 'blue' + +{% endtest %} + + +{% test rejected_values(model, column_name, values) %} + + select * + from {{ model }} + where {{ column_name }} in ( + {% for value in values %} + '{{ value }}' {% if not loop.last %} , {% endif %} + {% endfor %} + ) + +{% endtest %} + + +{% test equivalent(model, value) %} + {% set expected = 'foo-bar' %} + {% set eq = 1 if value == expected else 0 %} + {% set validation_message -%} + 'got "{{ value }}", expected "{{ expected }}"' + {%- endset %} + {% if eq == 0 and execute %} + {{ log(validation_message, info=True) }} + {% endif %} + + select {{ validation_message }} as validation_error + where {{ eq }} = 0 +{% endtest %} + + +""" + +macros_v2__override_get_test_macros__get_test_sql_sql = """ +{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} + select + {{ fail_calc }} as failures, + case when {{ fail_calc }} {{ warn_if }} then 1 else 0 end as should_warn, + case when {{ fail_calc }} {{ error_if }} then 1 else 0 end as should_error + from ( + {{ main_sql }} + {{ "limit " ~ limit if limit != none }} + ) dbt_internal_test +{%- endmacro %} +""" + +macros_v2__custom_configs__test_sql = """ +{% test where(model, column_name) %} + {{ config(where = "1 = 0") }} + select * from {{ model }} +{% endtest %} + +{% test error_if(model, column_name) %} + {{ config(error_if = "<= 0", warn_if = "<= 0") }} + select * from {{ model }} +{% endtest %} + + +{% test warn_if(model, column_name) %} + {{ config(warn_if = "<= 0", severity = "WARN") }} + select * from {{ model }} +{% endtest %} + +{% test limit(model, column_name) %} + {{ config(limit = 0) }} + select * from {{ model }} +{% endtest %} + +{% test fail_calc(model, column_name) %} + {{ config(fail_calc = "count(*) - count(*)") }} + select * from {{ model }} +{% endtest %} + +""" + +test_context_macros_namespaced__my_test_sql = """ +{% macro test_call_pkg_macro(model) %} + select {{ test_utils.current_timestamp() }} +{% endmacro %} + +""" + +test_context_macros_namespaced__custom_schema_tests_sql = """ +{% test type_one(model) %} + + select * from ( + + select * from {{ model }} + union all + select * from {{ ref('model_b') }} + + ) as Foo + +{% endtest %} + +{% test type_two(model) %} + + {{ config(severity = "WARN") }} + + select * from {{ model }} + +{% endtest %} + +""" + +seeds__some_seed_csv = """ +col_int,col_str +1,hello +2,goodbye +""" + +test_context_models__schema_yml = """ + +version: 2 + +models: + - name: model_a + data_tests: + - type_one + - type_two + - name: model_c + data_tests: + - call_pkg_macro + - local_utils.dispatch + - my_datediff + +""" + +test_context_models__model_c_sql = """ +select 1 as fun + +""" + +test_context_models__model_b_sql = """ +select 1 as notfun + +""" + +test_context_models__model_a_sql = """ +select 1 as fun + +""" + +name_collision__schema_yml = """ +version: 2 +models: +- name: base + columns: + - name: extension_id + data_tests: + - not_null +- name: base_extension + columns: + - name: id + data_tests: + - not_null + +""" + +name_collision__base_sql = """ +SELECT 'hello_world' AS extension_id +""" + +name_collision__base_extension_sql = """ +SELECT 'NOT_NULL' AS id +""" + + +dupe_generic_tests_collide__schema_yml = """ +version: 2 +models: +- name: model_a + columns: + - name: id + data_tests: + - not_null: + config: + where: "1=1" + - not_null: + config: + where: "1=2" + +""" + +dupe_generic_tests_collide__model_a = """ +SELECT 'NOT_NULL' AS id +""" + + +custom_generic_test_config_custom_macro__schema_yml = """ +version: 2 +models: +- name: model_a + columns: + - name: id + data_tests: + - not_null: + config: + where: "id = (select id from {{ ref('model_a') }} limit 1)" + +""" + +custom_generic_test_config_custom_macro__model_a = """ +SELECT 1 AS id +""" + + +custom_generic_test_names__schema_yml = """ +version: 2 +models: +- name: model_a + columns: + - name: id + data_tests: + - not_null: + name: not_null_where_1_equals_1 + config: + where: "1=1" + - not_null: + name: not_null_where_1_equals_2 + config: + where: "1=2" + +""" + +custom_generic_test_names__model_a = """ +SELECT 'NOT_NULL' AS id +""" + +custom_generic_test_names_alt_format__schema_yml = """ +version: 2 +models: +- name: model_a + columns: + - name: id + data_tests: + - name: not_null_where_1_equals_1 + test_name: not_null + config: + where: "1=1" + - name: not_null_where_1_equals_2 + test_name: not_null + config: + where: "1=2" + +""" + +custom_generic_test_names_alt_format__model_a = """ +SELECT 'NOT_NULL' AS id +""" + + +test_context_where_subq_macros__custom_generic_test_sql = """ +/*{# This test will fail if get_where_subquery() is missing from TestContext + TestMacroNamespace #}*/ + +{% test self_referential(model) %} + + {%- set relation = api.Relation.create(schema=model.schema, identifier=model.table) -%} + {%- set columns = adapter.get_columns_in_relation(relation) -%} + {%- set columns_csv = columns | map(attribute='name') | list | join(', ') -%} + + select {{ columns_csv }} from {{ model }} + limit 0 + +{% endtest %} + +""" + +invalid_schema_models__schema_yml = """ +version: 2 + +models: + name: model + columns: + - name: Id + quote: true + data_tests: + - unique + - not_null + +""" + +invalid_schema_models__model_sql = """ +select 1 as "Id" + +""" + +all_quotes_schema__schema_yml = """# models/schema.yml +# only comments here, which should be okay! +# https://github.com/dbt-labs/dbt-core/issues/3568""" + +models_v2__render_test_cli_arg_models__schema_yml = """ +version: 2 + +models: + - name: model + data_tests: + - equivalent: + value: "{{ var('myvar', 'baz') }}-bar" + +""" + +models_v2__render_test_cli_arg_models__model_sql = """ +select 1 as id + +""" + +models_v2__override_get_test_models__schema_yml = """ +version: 2 + +models: + - name: my_model_pass + description: "The table has 1 null values, and we're okay with that, until it's more than 1." + columns: + - name: id + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null: + error_if: '>1' + warn_if: '>1' + + - name: my_model_warning + description: "The table has 1 null values, and we're okay with that, but let us know" + columns: + - name: id + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null: + error_if: '>1' + + - name: my_model_failure + description: "The table has 2 null values, and we're not okay with that" + columns: + - name: id + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null: + error_if: '>1' + + +""" + +models_v2__override_get_test_models__my_model_warning_sql = """ +select * from {{ ref('my_model_pass') }} +""" + +models_v2__override_get_test_models__my_model_pass_sql = """ +select 1 as id +UNION ALL +select null as id +""" + +models_v2__override_get_test_models__my_model_failure_sql = """ +select * from {{ ref('my_model_pass') }} +UNION ALL +select null as id +""" + +models_v2__models__schema_yml = """ +version: 2 + +models: + - name: table_copy + description: "A copy of the table" + columns: + - name: id + description: "The ID" + data_tests: + - not_null + - unique + tags: + - table_id + - name: first_name + description: "The user's first name" + data_tests: + - not_null + tags: + - table_first_name + - name: ip_address + description: "The user's IP address" + data_tests: + - not_null + - name: updated_at + description: "The update time of the user" + data_tests: + - not_null + - name: email + description: "The user's email address" + data_tests: + - unique + - name: favorite_color + description: "The user's favorite color" + data_tests: + - accepted_values: { + values: ['blue', 'green'], + quote: true, + tags: table_copy_favorite_color # tags can be a single string + } + tags: + - table_favorite_color + - name: fav_number + description: "The user's favorite number" + data_tests: + - accepted_values: + values: [3.14159265] + quote: false + tags: # tags can be a list of strings + - favorite_number_is_pi + + + - name: table_summary + description: "The summary table" + columns: + - name: favorite_color_copy + description: "The favorite color" + data_tests: + - not_null + - unique + - accepted_values: { values: ['blue', 'green'] } + - relationships: { field: favorite_color, to: ref('table_copy') } + tags: + - table_favorite_color + - name: count + description: "The number of responses for this favorite color" + data_tests: + - not_null + +# all of these constraints will fail + - name: table_failure_copy + description: "The table copy that does not comply with the schema" + columns: + - name: id + description: "The user ID" + data_tests: + - not_null + - unique + tags: + - xfail + - name: favorite_color + description: "The user's favorite color" + data_tests: + - accepted_values: { values: ['blue', 'green'] } + tags: + - xfail + +# all of these constraints will fail + - name: table_failure_summary + description: "The table summary that does not comply with the schema" + columns: + - name: favorite_color + description: "The favorite color" + data_tests: + - accepted_values: { values: ['red'] } + - relationships: { field: favorite_color, to: ref('table_copy') } + tags: + - xfail + +# this table is disabled so these tests should be ignored + - name: table_disabled + description: "A disabled table" + columns: + - name: favorite_color + description: "The favorite color" + data_tests: + - accepted_values: { values: ['red'] } + - relationships: { field: favorite_color, to: ref('table_copy') } + +# all of these constraints will fail + - name: table_failure_null_relation + description: "A table with a null value where it should be a foreign key" + columns: + - name: id + description: "The user ID" + data_tests: + - relationships: { field: id, to: ref('table_failure_copy') } + tags: + - xfail + +""" + +models_v2__models__table_summary_sql = """ +{{ + config( + materialized='table' + ) +}} + +select favorite_color as favorite_color_copy, count(*) as count +from {{ ref('table_copy') }} +group by 1 + +""" + +models_v2__models__table_failure_summary_sql = """ +{{ + config( + materialized='table' + ) +}} + +-- force a foreign key constraint failure here +select 'purple' as favorite_color, count(*) as count +from {{ ref('table_failure_copy') }} +group by 1 + +""" + +models_v2__models__table_disabled_sql = """ +{{ + config( + enabled=False + ) +}} + +-- force a foreign key constraint failure here +select 'purple' as favorite_color, count(*) as count +from {{ ref('table_failure_copy') }} +group by 1 + +""" + +models_v2__models__table_failure_null_relation_sql = """ +{{ + config( + materialized='table' + ) +}} + +-- force a foreign key constraint failure here +select 105 as id, count(*) as count +from {{ ref('table_failure_copy') }} +group by 1 + +""" + +models_v2__models__table_failure_copy_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select * from {{ this.schema }}.seed_failure + +""" + +models_v2__models__table_copy_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select * from {{ this.schema }}.seed + +""" + +models_v2__malformed__schema_yml = """ +version: 2 + +models: + # this whole model should fail and not run + - name: table_copy + description: "A copy of the table" + columns: + - name: id + description: "The ID" + data_tests: + - not_null + - unique + - name: favorite_color + data_tests: + # this is missing a "-" and is malformed + accepted_values: { values: ['blue', 'green'] } + + # this whole model should pass and run + - name: table_summary + description: "The summary table" + columns: + - name: favorite_color + description: "The favorite color" + data_tests: + - not_null + - unique + - accepted_values: { values: ['blue', 'green'] } + - relationships: { field: favorite_color, to: ref('table_copy') } + - name: count + description: "The number of responses for this favorite color" + data_tests: + - not_null + +""" + +models_v2__malformed__table_summary_sql = """ +{{ + config( + materialized='table' + ) +}} + +select favorite_color, count(*) as count +from {{ ref('table_copy') }} +group by 1 + +""" + +models_v2__malformed__table_copy_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select * from {{ this.schema }}.seed + +""" + +models_v2__override_get_test_models_fail__schema_yml = """ +version: 2 + +models: + - name: my_model + description: "The table has 1 null values, and we're not okay with that." + columns: + - name: id + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null + + + +""" + +models_v2__override_get_test_models_fail__my_model_sql = """ +select 1 as id +UNION ALL +select null as id +""" + +models_v2__custom_configs__schema_yml = """ +version: 2 + +models: + - name: table_copy + description: "A copy of the table" + # passes + data_tests: + - where + - error_if + - warn_if + - limit + - fail_calc + columns: + - name: id + data_tests: + # relationships with where + - relationships: + to: ref('table_copy') # itself + field: id + where: 1=1 + - name: table_copy_another_one + data_tests: + - where: # test override + weird quoting + config: + where: "\\"favorite_color\\" = 'red'" + - name: "table.copy.with.dots" + description: "A copy of the table with a gross name" + # passes, see https://github.com/dbt-labs/dbt-core/issues/3857 + data_tests: + - where + +""" + +models_v2__custom_configs__table_copy_another_one_sql = """ +select * from {{ ref('table_copy') }} + +""" + +models_v2__custom_configs__table_copy_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select * from {{ this.schema }}.seed + +""" + +models_v2__custom_configs__table_copy_with_dots_sql = """ +select * from {{ ref('table_copy') }} + +""" + +models_v2__render_test_configured_arg_models__schema_yml = """ +version: 2 + +models: + - name: model + data_tests: + - equivalent: + value: "{{ var('myvar', 'baz') }}-bar" + +""" + +models_v2__render_test_configured_arg_models__model_sql = """ +select 1 as id + +""" + +models_v2__custom__schema_yml = """ +version: 2 + +models: + - name: table_copy + description: "A copy of the table" + columns: + - name: email + data_tests: + - not_null + - name: id + description: "The ID" + data_tests: + - unique + - name: favorite_color + data_tests: + - every_value_is_blue + - rejected_values: { values: ['orange', 'purple'] } + # passes + data_tests: + - local_dep.equality: { compare_model: ref('table_copy') } + +""" + +models_v2__custom__table_copy_sql = """ + +{{ + config( + materialized='table' + ) +}} + +select * from {{ this.schema }}.seed + +""" + +models_v2__limit_null__schema_yml = """ +version: 2 + +models: + - name: table_limit_null + description: "The table has 1 null values, and we're okay with that, until it's more than 1." + columns: + - name: favorite_color_full_list + description: "The favorite color" + - name: count + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null: + error_if: '>1' + warn_if: '>1' + + - name: table_warning_limit_null + description: "The table has 1 null value, and we're okay with 1, but want to know of any." + columns: + - name: favorite_color_full_list + description: "The favorite color" + - name: count + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null: + error_if: '>1' + + - name: table_failure_limit_null + description: "The table has some 2 null values, and that's not ok. Warn and error." + columns: + - name: favorite_color_full_list + description: "The favorite color" + - name: count + description: "The number of responses for this favorite color - purple will be null" + data_tests: + - not_null: + error_if: '>1' + +""" + +models_v2__limit_null__table_warning_limit_null_sql = """ +{{ + config( + materialized='table' + ) +}} + +select * from {{ref('table_limit_null')}} +""" + +models_v2__limit_null__table_limit_null_sql = """ +{{ + config( + materialized='table' + ) +}} + +select favorite_color as favorite_color_full_list, count(*) as count +from {{ this.schema }}.seed +group by 1 + +UNION ALL + +select 'purple' as favorite_color_full_list, null as count +""" + +models_v2__limit_null__table_failure_limit_null_sql = """ +{{ + config( + materialized='table' + ) +}} + +select * from {{ref('table_limit_null')}} + +UNION ALL + +select 'magenta' as favorite_color_full_list, null as count +""" + +local_utils__dbt_project_yml = """ +name: 'local_utils' +version: '1.0' +config-version: 2 + +profile: 'default' + +macro-paths: ["macros"] + + +""" + +local_utils__macros__datediff_sql = """ +{% macro datediff(first_date, second_date, datepart) %} + {{ return(adapter.dispatch('datediff', 'local_utils')(first_date, second_date, datepart)) }} +{% endmacro %} + + +{% macro default__datediff(first_date, second_date, datepart) %} + + datediff( + {{ datepart }}, + {{ first_date }}, + {{ second_date }} + ) + +{% endmacro %} + + +{% macro postgres__datediff(first_date, second_date, datepart) %} + + {% if datepart == 'year' %} + (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date)) + {% elif datepart == 'quarter' %} + ({{ adapter.dispatch('datediff', 'local_utils')(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date)) + {% else %} + ( 1000 ) + {% endif %} + +{% endmacro %} + + +""" + +local_utils__macros__current_timestamp_sql = """ +{% macro current_timestamp() -%} + {{ return(adapter.dispatch('current_timestamp')) }} +{%- endmacro %} + +{% macro default__current_timestamp() -%} + now() +{%- endmacro %} + +""" + +local_utils__macros__custom_test_sql = """ +{% macro test_dispatch(model) -%} + {{ return(adapter.dispatch('test_dispatch', macro_namespace = 'local_utils')()) }} +{%- endmacro %} + +{% macro default__test_dispatch(model) %} + select {{ adapter.dispatch('current_timestamp', macro_namespace = 'local_utils')() }} +{% endmacro %} + +""" + +ephemeral__schema_yml = """ + +version: 2 +models: + - name: ephemeral + columns: + - name: id + data_tests: + - unique + +""" + +ephemeral__ephemeral_sql = """ + +{{ config(materialized='ephemeral') }} + +select 1 as id + +""" + +quote_required_models__schema_yml = """ +version: 2 + +models: + - name: model + columns: + - name: Id + quote: true + data_tests: + - unique + - not_null + - name: model_again + quote_columns: true + columns: + - name: Id + data_tests: + - unique + - not_null + - name: model_noquote + quote_columns: true + columns: + - name: Id + quote: false + data_tests: + - unique + - not_null + +sources: + # this should result in column quoting = true + - name: my_source + schema: "{{ target.schema }}" + quoting: + column: true + tables: + - name: model + quoting: + column: false + columns: + - name: Id + quote: true + data_tests: + - unique + - name: my_source_2 + schema: "{{ target.schema }}" + quoting: + column: false + tables: + # this should result in column quoting = true + - name: model + quoting: + column: true + columns: + - name: Id + data_tests: + - unique + # this should result in column quoting = false + - name: model_noquote + columns: + - name: Id + data_tests: + - unique + + +""" + +quote_required_models__model_again_sql = """ +select 1 as "Id" + +""" + +quote_required_models__model_noquote_sql = """ +select 1 as id + +""" + +quote_required_models__model_sql = """ +select 1 as "Id" + +""" + +alt_local_utils__macros__type_timestamp_sql = """ +{%- macro type_timestamp() -%} + {{ return(adapter.dispatch('type_timestamp', 'local_utils')()) }} +{%- endmacro -%} + +{% macro default__type_timestamp() %} + {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }} +{% endmacro %} +""" + +macro_resolution_order_macros__my_custom_test_sql = """ +{% test my_custom_test(model) %} + select cast(current_timestamp as {{ dbt.type_timestamp() }}) + limit 0 +{% endtest %} +""" + +macro_resolution_order_models__my_model_sql = """ +select 1 as id +""" + +macro_resolution_order_models__config_yml = """ +version: 2 +models: + - name: my_model + data_tests: + - my_custom_test +""" diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py new file mode 100644 index 000000000..79771c50b --- /dev/null +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -0,0 +1,1130 @@ +import os +import re + +from dbt.contracts.results import TestStatus +from dbt.exceptions import ParsingError, DuplicateResourceNameError +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt, write_file +from dbt_common.exceptions import CompilationError +import pytest + +from tests.functional.schema_tests.fixtures import ( + alt_local_utils__macros__type_timestamp_sql, + all_quotes_schema__schema_yml, + case_sensitive_models__lowercase_sql, + case_sensitive_models__schema_yml, + case_sensitive_models__uppercase_SQL, + custom_generic_test_config_custom_macro__model_a, + custom_generic_test_config_custom_macro__schema_yml, + custom_generic_test_names__model_a, + custom_generic_test_names__schema_yml, + custom_generic_test_names_alt_format__model_a, + custom_generic_test_names_alt_format__schema_yml, + dupe_generic_tests_collide__model_a, + dupe_generic_tests_collide__schema_yml, + ephemeral__ephemeral_sql, + ephemeral__schema_yml, + invalid_schema_models__model_sql, + invalid_schema_models__schema_yml, + local_dependency__dbt_project_yml, + local_dependency__macros__equality_sql, + local_utils__dbt_project_yml, + local_utils__macros__current_timestamp_sql, + local_utils__macros__custom_test_sql, + local_utils__macros__datediff_sql, + macro_resolution_order_models__config_yml, + macro_resolution_order_macros__my_custom_test_sql, + macro_resolution_order_models__my_model_sql, + macros_v2__custom_configs__test_sql, + macros_v2__macros__tests_sql, + macros_v2__override_get_test_macros__get_test_sql_sql, + macros_v2__override_get_test_macros_fail__get_test_sql_sql, + models_v2__custom__schema_yml, + models_v2__custom__table_copy_sql, + models_v2__custom_configs__schema_yml, + models_v2__custom_configs__table_copy_another_one_sql, + models_v2__custom_configs__table_copy_sql, + models_v2__custom_configs__table_copy_with_dots_sql, + models_v2__limit_null__schema_yml, + models_v2__limit_null__table_failure_limit_null_sql, + models_v2__limit_null__table_limit_null_sql, + models_v2__limit_null__table_warning_limit_null_sql, + models_v2__malformed__schema_yml, + models_v2__malformed__table_copy_sql, + models_v2__malformed__table_summary_sql, + models_v2__models__schema_yml, + models_v2__models__table_copy_sql, + models_v2__models__table_disabled_sql, + models_v2__models__table_failure_copy_sql, + models_v2__models__table_failure_null_relation_sql, + models_v2__models__table_failure_summary_sql, + models_v2__models__table_summary_sql, + models_v2__override_get_test_models__my_model_failure_sql, + models_v2__override_get_test_models__my_model_pass_sql, + models_v2__override_get_test_models__my_model_warning_sql, + models_v2__override_get_test_models__schema_yml, + models_v2__override_get_test_models_fail__my_model_sql, + models_v2__override_get_test_models_fail__schema_yml, + models_v2__render_test_cli_arg_models__model_sql, + models_v2__render_test_cli_arg_models__schema_yml, + models_v2__render_test_configured_arg_models__model_sql, + models_v2__render_test_configured_arg_models__schema_yml, + name_collision__base_sql, + name_collision__base_extension_sql, + name_collision__schema_yml, + quote_required_models__model_again_sql, + quote_required_models__model_noquote_sql, + quote_required_models__model_sql, + quote_required_models__schema_yml, + seeds__some_seed_csv, + test_context_where_subq_models__model_a_sql, + test_context_where_subq_models__schema_yml, + test_context_macros__custom_schema_tests_sql, + test_context_macros__my_test_sql, + test_context_macros__test_my_datediff_sql, + test_context_models__model_a_sql, + test_context_models__model_b_sql, + test_context_models__model_c_sql, + test_context_models__schema_yml, + test_context_macros_namespaced__custom_schema_tests_sql, + test_context_models_namespaced__model_a_sql, + test_context_models_namespaced__model_b_sql, + test_context_models_namespaced__model_c_sql, + test_context_macros_namespaced__my_test_sql, + test_context_models_namespaced__schema_yml, + test_context_where_subq_macros__custom_generic_test_sql, + test_utils__dbt_project_yml, + test_utils__macros__current_timestamp_sql, + test_utils__macros__custom_test_sql, + wrong_specification_block__schema_yml, +) + + +class TestSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) + project.run_sql_file(os.path.join(project.test_data_dir, "seed_failure.sql")) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__models__schema_yml, + "table_summary.sql": models_v2__models__table_summary_sql, + "table_failure_summary.sql": models_v2__models__table_failure_summary_sql, + "table_disabled.sql": models_v2__models__table_disabled_sql, + "table_failure_null_relation.sql": models_v2__models__table_failure_null_relation_sql, + "table_failure_copy.sql": models_v2__models__table_failure_copy_sql, + "table_copy.sql": models_v2__models__table_copy_sql, + } + + def assertTestFailed(self, result): + assert result.status == "fail" + assert not result.skipped + assert result.failures > 0, "test {} did not fail".format(result.node.name) + + def assertTestPassed(self, result): + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0, "test {} failed".format(result.node.name) + + def test_schema_tests( + self, + project, + ): + results = run_dbt() + assert len(results) == 5 + test_results = run_dbt(["test"], expect_pass=False) + # If the disabled model's tests ran, there would be 20 of these. + assert len(test_results) == 19 + + for result in test_results: + # assert that all deliberately failing tests actually fail + if "failure" in result.node.name: + self.assertTestFailed(result) + # assert that actual tests pass + else: + self.assertTestPassed(result) + assert sum(x.failures for x in test_results) == 6 + + def test_schema_test_selection( + self, + project, + ): + results = run_dbt() + assert len(results) == 5 + test_results = run_dbt(["test", "--models", "tag:table_favorite_color"]) + # 1 in table_copy, 4 in table_summary + assert len(test_results) == 5 + for result in test_results: + self.assertTestPassed(result) + + test_results = run_dbt(["test", "--models", "tag:favorite_number_is_pi"]) + assert len(test_results) == 1 + self.assertTestPassed(test_results[0]) + + test_results = run_dbt(["test", "--models", "tag:table_copy_favorite_color"]) + assert len(test_results) == 1 + self.assertTestPassed(test_results[0]) + + def test_schema_test_exclude_failures( + self, + project, + ): + results = run_dbt() + assert len(results) == 5 + test_results = run_dbt(["test", "--exclude", "tag:xfail"]) + # If the failed + disabled model's tests ran, there would be 20 of these. + assert len(test_results) == 13 + for result in test_results: + self.assertTestPassed(result) + test_results = run_dbt(["test", "--models", "tag:xfail"], expect_pass=False) + assert len(test_results) == 6 + for result in test_results: + self.assertTestFailed(result) + + +class TestLimitedSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__limit_null__schema_yml, + "table_warning_limit_null.sql": models_v2__limit_null__table_warning_limit_null_sql, + "table_limit_null.sql": models_v2__limit_null__table_limit_null_sql, + "table_failure_limit_null.sql": models_v2__limit_null__table_failure_limit_null_sql, + } + + def assertTestFailed(self, result): + assert result.status == "fail" + assert not result.skipped + assert result.failures > 0, "test {} did not fail".format(result.node.name) + + def assertTestWarn(self, result): + assert result.status == "warn" + assert not result.skipped + assert result.failures > 0, "test {} passed without expected warning".format( + result.node.name + ) + + def assertTestPassed(self, result): + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0, "test {} failed".format(result.node.name) + + def test_limit_schema_tests( + self, + project, + ): + results = run_dbt() + assert len(results) == 3 + test_results = run_dbt(["test"], expect_pass=False) + assert len(test_results) == 3 + + for result in test_results: + # assert that all deliberately failing tests actually fail + if "failure" in result.node.name: + self.assertTestFailed(result) + # assert that tests with warnings have them + elif "warning" in result.node.name: + self.assertTestWarn(result) + # assert that actual tests pass + else: + self.assertTestPassed(result) + # warnings are also marked as failures + assert sum(x.failures for x in test_results) == 3 + + +class TestDefaultBoolType: + # test with default True/False in get_test_sql macro + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__override_get_test_models__schema_yml, + "my_model_warning.sql": models_v2__override_get_test_models__my_model_warning_sql, + "my_model_pass.sql": models_v2__override_get_test_models__my_model_pass_sql, + "my_model_failure.sql": models_v2__override_get_test_models__my_model_failure_sql, + } + + def assertTestFailed(self, result): + assert result.status == "fail" + assert not result.skipped + assert result.failures > 0, "test {} did not fail".format(result.node.name) + + def assertTestWarn(self, result): + assert result.status == "warn" + assert not result.skipped + assert result.failures > 0, "test {} passed without expected warning".format( + result.node.name + ) + + def assertTestPassed(self, result): + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0, "test {} failed".format(result.node.name) + + def test_limit_schema_tests( + self, + project, + ): + results = run_dbt() + assert len(results) == 3 + test_results = run_dbt(["test"], expect_pass=False) + assert len(test_results) == 3 + + for result in test_results: + # assert that all deliberately failing tests actually fail + if "failure" in result.node.name: + self.assertTestFailed(result) + # assert that tests with warnings have them + elif "warning" in result.node.name: + self.assertTestWarn(result) + # assert that actual tests pass + else: + self.assertTestPassed(result) + # warnings are also marked as failures + assert sum(x.failures for x in test_results) == 3 + + +class TestOtherBoolType: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + macros_v2_file = { + "override_get_test_macros": { + "get_test_sql.sql": macros_v2__override_get_test_macros__get_test_sql_sql + }, + } + write_project_files(project_root, "macros-v2", macros_v2_file) + + # test with expected 0/1 in custom get_test_sql macro + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__override_get_test_models__schema_yml, + "my_model_warning.sql": models_v2__override_get_test_models__my_model_warning_sql, + "my_model_pass.sql": models_v2__override_get_test_models__my_model_pass_sql, + "my_model_failure.sql": models_v2__override_get_test_models__my_model_failure_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["macros-v2/override_get_test_macros"], + } + + def assertTestFailed(self, result): + assert result.status == "fail" + assert not result.skipped + assert result.failures > 0, "test {} did not fail".format(result.node.name) + + def assertTestWarn(self, result): + assert result.status == "warn" + assert not result.skipped + assert result.failures > 0, "test {} passed without expected warning".format( + result.node.name + ) + + def assertTestPassed(self, result): + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0, "test {} failed".format(result.node.name) + + def test_limit_schema_tests( + self, + project, + ): + results = run_dbt() + assert len(results) == 3 + test_results = run_dbt(["test"], expect_pass=False) + assert len(test_results) == 3 + + for result in test_results: + # assert that all deliberately failing tests actually fail + if "failure" in result.node.name: + self.assertTestFailed(result) + # assert that tests with warnings have them + elif "warning" in result.node.name: + self.assertTestWarn(result) + # assert that actual tests pass + else: + self.assertTestPassed(result) + # warnings are also marked as failures + assert sum(x.failures for x in test_results) == 3 + + +class TestNonBoolType: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + macros_v2_file = { + "override_get_test_macros_fail": { + "get_test_sql.sql": macros_v2__override_get_test_macros_fail__get_test_sql_sql + }, + } + write_project_files(project_root, "macros-v2", macros_v2_file) + + # test with invalid 'x'/'y' in custom get_test_sql macro + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__override_get_test_models_fail__schema_yml, + "my_model.sql": models_v2__override_get_test_models_fail__my_model_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["macros-v2/override_get_test_macros_fail"], + } + + def test_limit_schema_tests( + self, + project, + ): + results = run_dbt() + assert len(results) == 1 + run_result = run_dbt(["test"], expect_pass=False) + results = run_result.results + assert len(results) == 1 + assert results[0].status == TestStatus.Error + assert re.search(r"'get_test_sql' returns 'x'", results[0].message) + + +class TestMalformedSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__malformed__schema_yml, + "table_summary.sql": models_v2__malformed__table_summary_sql, + "table_copy.sql": models_v2__malformed__table_copy_sql, + } + + def test_malformed_schema_will_break_run( + self, + project, + ): + with pytest.raises(ParsingError): + run_dbt() + + +class TestCustomConfigSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project, project_root): + project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) + + macros_v2_file = {"custom-configs": {"test.sql": macros_v2__custom_configs__test_sql}} + write_project_files(project_root, "macros-v2", macros_v2_file) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__custom_configs__schema_yml, + "table_copy_another_one.sql": models_v2__custom_configs__table_copy_another_one_sql, + "table_copy.sql": models_v2__custom_configs__table_copy_sql, + "table.copy.with.dots.sql": models_v2__custom_configs__table_copy_with_dots_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["macros-v2/custom-configs"], + } + + def test_config( + self, + project, + ): + """Test that tests use configs properly. All tests for + this project will fail, configs are set to make test pass.""" + results = run_dbt(["test"], expect_pass=False) + + assert len(results) == 8 + for result in results: + assert not result.skipped + + +class TestHooksInTests: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": ephemeral__schema_yml, + "ephemeral.sql": ephemeral__ephemeral_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["{{ log('hooks called in tests -- good!') if execute }}"], + "on-run-end": ["{{ log('hooks called in tests -- good!') if execute }}"], + } + + def test_hooks_do_run_for_tests( + self, + project, + ): + # This passes now that hooks run, a behavior we changed in v1.0 + results = run_dbt(["test", "--model", "ephemeral"]) + assert len(results) == 1 + for result in results: + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0, "test {} failed".format(result.node.name) + + +class TestHooksForWhich: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": ephemeral__schema_yml, + "ephemeral.sql": ephemeral__ephemeral_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": [ + "{{exceptions.raise_compiler_error('hooks called in tests -- error') if (execute and flags.WHICH != 'test') }}" + ], + "on-run-end": [ + "{{exceptions.raise_compiler_error('hooks called in tests -- error') if (execute and flags.WHICH != 'test') }}" + ], + } + + def test_these_hooks_dont_run_for_tests( + self, + project, + ): + # This would fail if the hooks ran + results = run_dbt(["test", "--model", "ephemeral"]) + assert len(results) == 1 + for result in results: + assert result.status == "pass" + assert not result.skipped + assert result.failures == 0, "test {} failed".format(result.node.name) + + +class TestCustomSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project, project_root, dbt_integration_project): # noqa: F811 + write_project_files(project_root, "dbt_integration_project", dbt_integration_project) + project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) + + local_dependency_files = { + "dbt_project.yml": local_dependency__dbt_project_yml, + "macros": {"equality.sql": local_dependency__macros__equality_sql}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + macros_v2_file = { + "macros": {"tests.sql": macros_v2__macros__tests_sql}, + } + write_project_files(project_root, "macros-v2", macros_v2_file) + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "local": "./local_dependency", + }, + { + "local": "./dbt_integration_project", + }, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + # dbt-utils contains a schema test (equality) + # dbt-integration-project contains a schema.yml file + # both should work! + return { + "config-version": 2, + "macro-paths": ["macros-v2/macros"], + } + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__custom__schema_yml, + "table_copy.sql": models_v2__custom__table_copy_sql, + } + + def test_schema_tests( + self, + project, + ): + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 4 + + test_results = run_dbt(["test"], expect_pass=False) + assert len(test_results) == 6 + + expected_failures = [ + "not_null_table_copy_email", + "every_value_is_blue_table_copy_favorite_color", + ] + + for result in test_results: + if result.status == "fail": + assert result.node.name in expected_failures + + +class TestQuotedSchemaTestColumns: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": quote_required_models__schema_yml, + "model_again.sql": quote_required_models__model_again_sql, + "model_noquote.sql": quote_required_models__model_noquote_sql, + "model.sql": quote_required_models__model_sql, + } + + def test_quote_required_column( + self, + project, + ): + results = run_dbt() + assert len(results) == 3 + results = run_dbt(["test", "-m", "model"]) + assert len(results) == 2 + results = run_dbt(["test", "-m", "model_again"]) + assert len(results) == 2 + results = run_dbt(["test", "-m", "model_noquote"]) + assert len(results) == 2 + results = run_dbt(["test", "-m", "source:my_source"]) + assert len(results) == 1 + results = run_dbt(["test", "-m", "source:my_source_2"]) + assert len(results) == 2 + + +class TestCliVarsSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + macros_v2_file = { + "macros": {"tests.sql": macros_v2__macros__tests_sql}, + } + write_project_files(project_root, "macros-v2", macros_v2_file) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__render_test_cli_arg_models__schema_yml, + "model.sql": models_v2__render_test_cli_arg_models__model_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["macros-v2/macros"], + } + + def test_argument_rendering( + self, + project, + ): + results = run_dbt() + assert len(results) == 1 + results = run_dbt(["test", "--vars", "{myvar: foo}"]) + assert len(results) == 1 + run_dbt(["test"], expect_pass=False) + + +class TestConfiguredVarsSchemaTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + macros_v2_file = { + "macros": {"tests.sql": macros_v2__macros__tests_sql}, + } + write_project_files(project_root, "macros-v2", macros_v2_file) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_v2__render_test_configured_arg_models__schema_yml, + "model.sql": models_v2__render_test_configured_arg_models__model_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["macros-v2/macros"], + "vars": {"myvar": "foo"}, + } + + def test_argument_rendering( + self, + project, + ): + results = run_dbt() + assert len(results) == 1 + results = run_dbt(["test"]) + assert len(results) == 1 + + +class TestSchemaCaseInsensitive: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": case_sensitive_models__schema_yml, + "lowercase.sql": case_sensitive_models__lowercase_sql, + } + + @pytest.fixture(scope="class", autouse=True) + def setUP(self, project): + # Create the uppercase SQL file + model_dir = os.path.join(project.project_root, "models") + write_file(case_sensitive_models__uppercase_SQL, model_dir, "uppercase.SQL") + + def test_schema_lowercase_sql( + self, + project, + ): + results = run_dbt() + assert len(results) == 2 + results = run_dbt(["test", "-m", "lowercase"]) + assert len(results) == 1 + + def test_schema_uppercase_sql( + self, + project, + ): + results = run_dbt() + assert len(results) == 2 + results = run_dbt(["test", "-m", "uppercase"]) + assert len(results) == 1 + + +class TestSchemaTestContext: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_utils_files = { + "dbt_project.yml": local_utils__dbt_project_yml, + "macros": { + "datediff.sql": local_utils__macros__datediff_sql, + "current_timestamp.sql": local_utils__macros__current_timestamp_sql, + "custom_test.sql": local_utils__macros__custom_test_sql, + }, + } + write_project_files(project_root, "local_utils", local_utils_files) + + test_context_macros_files = { + "my_test.sql": test_context_macros__my_test_sql, + "test_my_datediff.sql": test_context_macros__test_my_datediff_sql, + "custom_schema_tests.sql": test_context_macros__custom_schema_tests_sql, + } + write_project_files(project_root, "test-context-macros", test_context_macros_files) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": test_context_models__schema_yml, + "model_c.sql": test_context_models__model_c_sql, + "model_b.sql": test_context_models__model_b_sql, + "model_a.sql": test_context_models__model_a_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["test-context-macros"], + "vars": {"local_utils_dispatch_list": ["local_utils"]}, + } + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_utils"}]} + + def test_test_context_tests(self, project): + # This test tests the the TestContext and TestMacroNamespace + # are working correctly + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 3 + + run_result = run_dbt(["test"], expect_pass=False) + results = run_result.results + results = sorted(results, key=lambda r: r.node.name) + assert len(results) == 5 + # call_pkg_macro_model_c_ + assert results[0].status == TestStatus.Fail + # dispatch_model_c_ + assert results[1].status == TestStatus.Fail + # my_datediff + assert re.search(r"1000", results[2].node.compiled_code) + # type_one_model_a_ + assert results[3].status == TestStatus.Fail + assert re.search(r"union all", results[3].node.compiled_code) + # type_two_model_a_ + assert results[4].status == TestStatus.Warn + assert results[4].node.config.severity == "WARN" + + +class TestSchemaTestContextWithMacroNamespace: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + test_utils_files = { + "dbt_project.yml": test_utils__dbt_project_yml, + "macros": { + "current_timestamp.sql": test_utils__macros__current_timestamp_sql, + "custom_test.sql": test_utils__macros__custom_test_sql, + }, + } + write_project_files(project_root, "test_utils", test_utils_files) + + local_utils_files = { + "dbt_project.yml": local_utils__dbt_project_yml, + "macros": { + "datediff.sql": local_utils__macros__datediff_sql, + "current_timestamp.sql": local_utils__macros__current_timestamp_sql, + "custom_test.sql": local_utils__macros__custom_test_sql, + }, + } + write_project_files(project_root, "local_utils", local_utils_files) + + test_context_macros_namespaced_file = { + "my_test.sql": test_context_macros_namespaced__my_test_sql, + "custom_schema_tests.sql": test_context_macros_namespaced__custom_schema_tests_sql, + } + write_project_files( + project_root, "test-context-macros-namespaced", test_context_macros_namespaced_file + ) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": test_context_models_namespaced__schema_yml, + "model_c.sql": test_context_models_namespaced__model_c_sql, + "model_b.sql": test_context_models_namespaced__model_b_sql, + "model_a.sql": test_context_models_namespaced__model_a_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["test-context-macros-namespaced"], + "dispatch": [ + { + "macro_namespace": "test_utils", + "search_order": ["local_utils", "test_utils"], + } + ], + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + {"local": "test_utils"}, + {"local": "local_utils"}, + ] + } + + def test_test_context_with_macro_namespace( + self, + project, + ): + # This test tests the the TestContext and TestMacroNamespace + # are working correctly + run_dbt(["deps"]) + results = run_dbt() + assert len(results) == 3 + + run_result = run_dbt(["test"], expect_pass=False) + results = run_result.results + results = sorted(results, key=lambda r: r.node.name) + assert len(results) == 4 + # call_pkg_macro_model_c_ + assert results[0].status == TestStatus.Fail + # dispatch_model_c_ + assert results[1].status == TestStatus.Fail + # type_one_model_a_ + assert results[2].status == TestStatus.Fail + assert re.search(r"union all", results[2].node.compiled_code) + # type_two_model_a_ + assert results[3].status == TestStatus.Warn + assert results[3].node.config.severity == "WARN" + + +class TestSchemaTestNameCollision: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": name_collision__schema_yml, + "base.sql": name_collision__base_sql, + "base_extension.sql": name_collision__base_extension_sql, + } + + def test_collision_test_names_get_hash( + self, + project, + ): + """The models should produce unique IDs with a has appended""" + results = run_dbt() + test_results = run_dbt(["test"]) + + # both models and both tests run + assert len(results) == 2 + assert len(test_results) == 2 + + # both tests have the same unique id except for the hash + expected_unique_ids = [ + "test.test.not_null_base_extension_id.922d83a56c", + "test.test.not_null_base_extension_id.c8d18fe069", + ] + assert test_results[0].node.unique_id in expected_unique_ids + assert test_results[1].node.unique_id in expected_unique_ids + + +class TestGenericTestsCollide: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": dupe_generic_tests_collide__schema_yml, + "model_a.sql": dupe_generic_tests_collide__model_a, + } + + def test_generic_test_collision( + self, + project, + ): + """These tests collide, since only the configs differ""" + with pytest.raises(DuplicateResourceNameError) as exc: + run_dbt() + assert "dbt found two tests with the name" in str(exc.value) + + +class TestGenericTestsConfigCustomMacros: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": custom_generic_test_config_custom_macro__schema_yml, + "model_a.sql": custom_generic_test_config_custom_macro__model_a, + } + + def test_generic_test_config_custom_macros( + self, + project, + ): + """This test has a reference to a custom macro its configs""" + with pytest.raises(CompilationError) as exc: + run_dbt() + assert "Invalid generic test configuration" in str(exc) + + +class TestGenericTestsCustomNames: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": custom_generic_test_names__schema_yml, + "model_a.sql": custom_generic_test_names__model_a, + } + + # users can define custom names for specific instances of generic tests + def test_generic_tests_with_custom_names( + self, + project, + ): + """These tests don't collide, since they have user-provided custom names""" + results = run_dbt() + test_results = run_dbt(["test"]) + + # model + both tests run + assert len(results) == 1 + assert len(test_results) == 2 + + # custom names propagate to the unique_id + expected_unique_ids = [ + "test.test.not_null_where_1_equals_1.7b96089006", + "test.test.not_null_where_1_equals_2.8ae586e17f", + ] + assert test_results[0].node.unique_id in expected_unique_ids + assert test_results[1].node.unique_id in expected_unique_ids + + +class TestGenericTestsCustomNamesAltFormat(TestGenericTestsCustomNames): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": custom_generic_test_names_alt_format__schema_yml, + "model_a.sql": custom_generic_test_names_alt_format__model_a, + } + + # exactly as above, just alternative format for yaml definition + def test_collision_test_names_get_hash( + self, + project, + ): + """These tests don't collide, since they have user-provided custom names, + defined using an alternative format""" + super().test_generic_tests_with_custom_names(project) + + +class TestInvalidSchema: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": invalid_schema_models__schema_yml, + "model.sql": invalid_schema_models__model_sql, + } + + def test_invalid_schema_file( + self, + project, + ): + with pytest.raises(ParsingError) as exc: + run_dbt() + assert re.search(r"'models' is not a list", str(exc)) + + +class TestCommentedSchema: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": all_quotes_schema__schema_yml, + "model.sql": invalid_schema_models__model_sql, + } + + def test_quoted_schema_file(self, project): + try: + # A schema file consisting entirely of quotes should not be a problem + run_dbt(["parse"]) + except TypeError: + assert ( + False + ), "`dbt parse` failed with a yaml file that is all comments with the same exception as 3568" + except Exception: + assert False, "`dbt parse` failed with a yaml file that is all comments" + + +class TestWrongSpecificationBlock: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": wrong_specification_block__schema_yml} + + @pytest.fixture(scope="class") + def seeds(self): + return {"some_seed.csv": seeds__some_seed_csv} + + def test_wrong_specification_block( + self, + project, + ): + with pytest.warns(Warning): + results = run_dbt( + [ + "ls", + "-s", + "some_seed", + "--output", + "json", + "--output-keys", + "name", + "description", + ] + ) + + assert len(results) == 1 + assert results[0] == '{"name": "some_seed", "description": ""}' + + +class TestSchemaTestContextWhereSubq: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + test_context_where_subq_macros_file = { + "custom_generic_test.sql": test_context_where_subq_macros__custom_generic_test_sql + } + write_project_files( + project_root, "test-context-where-subq-macros", test_context_where_subq_macros_file + ) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": test_context_where_subq_models__schema_yml, + "model_a.sql": test_context_where_subq_models__model_a_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["test-context-where-subq-macros"], + } + + def test_test_context_tests( + self, + project, + ): + # This test tests that get_where_subquery() is included in TestContext + TestMacroNamespace, + # otherwise api.Relation.create() will return an error + results = run_dbt() + assert len(results) == 1 + + results = run_dbt(["test"]) + assert len(results) == 1 + + +class TestCustomSchemaTestMacroResolutionOrder: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + alt_local_utils_file = { + "dbt_project.yml": local_utils__dbt_project_yml, + "macros": { + "datediff.sql": alt_local_utils__macros__type_timestamp_sql, + }, + } + write_project_files(project_root, "alt_local_utils", alt_local_utils_file) + + macros_resolution_order_file = { + "my_custom_test.sql": macro_resolution_order_macros__my_custom_test_sql, + } + write_project_files( + project_root, "macro_resolution_order_macros", macros_resolution_order_file + ) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": macro_resolution_order_models__config_yml, + "my_model.sql": macro_resolution_order_models__my_model_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "macro-paths": ["macro_resolution_order_macros"], + } + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "alt_local_utils"}]} + + def test_macro_resolution_test_namespace( + self, + project, + ): + # https://github.com/dbt-labs/dbt-core/issues/5720 + # Previously, macros called as 'dbt.some_macro' would not correctly + # resolve to 'some_macro' from the 'dbt' namespace during static analysis, + # if 'some_macro' also existed in an installed package, + # leading to the macro being missing in the TestNamespace + run_dbt(["deps"]) + run_dbt(["parse"]) diff --git a/tests/functional/selected_resources/fixtures.py b/tests/functional/selected_resources/fixtures.py new file mode 100644 index 000000000..8d022b07e --- /dev/null +++ b/tests/functional/selected_resources/fixtures.py @@ -0,0 +1,35 @@ +on_run_start_macro_assert_selected_models_expected_list = """ +{% macro assert_selected_models_expected_list(expected_list) %} + + {% if execute and (expected_list is not none) %} + + {% set sorted_selected_resources = selected_resources | sort %} + {% set sorted_expected_list = expected_list | sort %} + + {% if sorted_selected_resources != sorted_expected_list %} + {{ exceptions.raise_compiler_error("FAIL: sorted_selected_resources" ~ sorted_selected_resources ~ " is different from " ~ sorted_expected_list) }} + {% endif %} + + {% endif %} + +{% endmacro %} +""" + + +my_model1 = """ +select 1 as id +""" + +my_model2 = """ +select * from {{ ref('model1') }} +""" + +my_snapshot = """ +{% snapshot cc_all_snapshot %} + {{ config( + check_cols='all', unique_key='id', strategy='check', + target_database=database, target_schema=schema + ) }} + select * from {{ ref('model2') }} +{% endsnapshot %} +""" diff --git a/tests/functional/selected_resources/test_selected_resources.py b/tests/functional/selected_resources/test_selected_resources.py new file mode 100644 index 000000000..5c7a3c39a --- /dev/null +++ b/tests/functional/selected_resources/test_selected_resources.py @@ -0,0 +1,105 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.selected_resources.fixtures import ( + my_model1, + my_model2, + my_snapshot, + on_run_start_macro_assert_selected_models_expected_list, +) + + +@pytest.fixture(scope="class") +def macros(): + return { + "assert_selected_models_expected_list.sql": on_run_start_macro_assert_selected_models_expected_list, + } + + +@pytest.fixture(scope="class") +def models(): + return {"model1.sql": my_model1, "model2.sql": my_model2} + + +@pytest.fixture(scope="class") +def snapshots(): + return { + "my_snapshot.sql": my_snapshot, + } + + +@pytest.fixture(scope="class") +def project_config_update(): + return { + "on-run-start": "{{ assert_selected_models_expected_list(var('expected_list',None)) }}", + } + + +@pytest.fixture +def build_all(project): + run_dbt(["build"]) + + +@pytest.mark.usefixtures("build_all") +class TestSelectedResources: + def test_selected_resources_build_selector(self, project): + results = run_dbt( + [ + "build", + "--select", + "model1+", + "--vars", + '{"expected_list": ["model.test.model1", "model.test.model2", "snapshot.test.cc_all_snapshot"]}', + ] + ) + assert results[0].status == "success" + + def test_selected_resources_build_selector_subgraph(self, project): + results = run_dbt( + [ + "build", + "--select", + "model2+", + "--vars", + '{"expected_list": ["model.test.model2", "snapshot.test.cc_all_snapshot"]}', + ] + ) + assert results[0].status == "success" + + def test_selected_resources_run(self, project): + results = run_dbt( + [ + "run", + "--select", + "model1+", + "--vars", + '{"expected_list": ["model.test.model2", "model.test.model1"]}', + ] + ) + assert results[0].status == "success" + + def test_selected_resources_build_no_selector(self, project): + results = run_dbt( + [ + "build", + "--vars", + '{"expected_list": ["model.test.model1", "model.test.model2", "snapshot.test.cc_all_snapshot"]}', + ] + ) + assert results[0].status == "success" + + def test_selected_resources_build_no_model(self, project): + results = run_dbt( + [ + "build", + "--select", + "model_that_does_not_exist", + "--vars", + '{"expected_list": []}', + ] + ) + assert not results + + def test_selected_resources_test_no_model(self, project): + results = run_dbt(["test", "--select", "model1+", "--vars", '{"expected_list": []}']) + assert not results diff --git a/tests/functional/semantic_models/fixtures.py b/tests/functional/semantic_models/fixtures.py new file mode 100644 index 000000000..3d1779a07 --- /dev/null +++ b/tests/functional/semantic_models/fixtures.py @@ -0,0 +1,322 @@ +# NOTE: these fixtures also get used in `/tests/functional/saved_queries/` +simple_metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" + +models_people_sql = """ +select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at +union all +select 2 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at +union all +select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at +""" + +groups_yml = """ +version: 2 + +groups: + - name: some_group + owner: + email: me@gmail.com + - name: some_other_group + owner: + email: me@gmail.com +""" + +models_people_metrics_yml = """ +version: 2 + +metrics: + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' +""" + +disabled_models_people_metrics_yml = """ +version: 2 + +metrics: + - name: number_of_people + config: + enabled: false + group: some_group + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' +""" + +semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + label: "Semantic People" + model: ref('people') + dimensions: + - name: favorite_color + label: "Favorite Color" + type: categorical + - name: created_at + label: "Created At" + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + label: "Years Tenure" + agg: SUM + expr: tenure + - name: people + label: "People" + agg: count + expr: id + entities: + - name: id + label: "Primary ID" + type: primary + defaults: + agg_time_dimension: created_at +""" + +semantic_model_people_diff_name_yml = """ +version: 2 + +semantic_models: + - name: semantic_people_diff_name + label: "Semantic People" + model: ref('people') + dimensions: + - name: favorite_color + label: "Favorite Color" + type: categorical + - name: created_at + label: "Created At" + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + label: "Years Tenure" + agg: SUM + expr: tenure + - name: people + label: "People" + agg: count + expr: id + entities: + - name: id + label: "Primary ID" + type: primary + defaults: + agg_time_dimension: created_at +""" + +semantic_model_descriptions = """ +{% docs semantic_model_description %} foo {% enddocs %} +{% docs dimension_description %} bar {% enddocs %} +{% docs measure_description %} baz {% enddocs %} +{% docs entity_description %} qux {% enddocs %} +""" + +semantic_model_people_yml_with_docs = """ +version: 2 + +semantic_models: + - name: semantic_people + model: ref('people') + description: "{{ doc('semantic_model_description') }}" + dimensions: + - name: favorite_color + type: categorical + description: "{{ doc('dimension_description') }}" + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + description: "{{ doc('measure_description') }}" + - name: people + agg: count + expr: id + entities: + - name: id + description: "{{ doc('entity_description') }}" + type: primary + defaults: + agg_time_dimension: created_at +""" + +enabled_semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + label: "Semantic People" + model: ref('people') + config: + enabled: true + group: some_group + meta: + my_meta: 'testing' + my_other_meta: 'testing more' + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + +disabled_semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + label: "Semantic People" + model: ref('people') + config: + enabled: false + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + + +schema_yml = """models: + - name: fct_revenue + description: This is the model fct_revenue. It should be able to use doc blocks + +semantic_models: + - name: revenue + description: This is the revenue semantic model. It should be able to use doc blocks + model: ref('fct_revenue') + + defaults: + agg_time_dimension: ds + + measures: + - name: txn_revenue + expr: revenue + agg: sum + agg_time_dimension: ds + create_metric: true + - name: sum_of_things + expr: 2 + agg: sum + agg_time_dimension: ds + - name: has_revenue + expr: true + agg: sum_boolean + agg_time_dimension: ds + - name: discrete_order_value_p99 + expr: order_total + agg: percentile + agg_time_dimension: ds + agg_params: + percentile: 0.99 + use_discrete_percentile: True + use_approximate_percentile: False + - name: test_agg_params_optional_are_empty + expr: order_total + agg: percentile + agg_time_dimension: ds + agg_params: + percentile: 0.99 + - name: test_non_additive + expr: txn_revenue + agg: sum + non_additive_dimension: + name: ds + window_choice: max + + dimensions: + - name: ds + type: time + expr: created_at + type_params: + time_granularity: day + + entities: + - name: user + type: foreign + expr: user_id + - name: id + type: primary + +metrics: + - name: simple_metric + label: Simple Metric + type: simple + type_params: + measure: sum_of_things +""" + +schema_without_semantic_model_yml = """models: + - name: fct_revenue + description: This is the model fct_revenue. It should be able to use doc blocks +""" + +fct_revenue_sql = """select + 1 as id, + 10 as user_id, + 1000 as revenue, + current_timestamp as created_at""" + +metricflow_time_spine_sql = """ +with days as ( + {{dbt_utils.date_spine('day' + , "to_date('01/01/2000','mm/dd/yyyy')" + , "to_date('01/01/2027','mm/dd/yyyy')" + ) + }} +), + +final as ( + select cast(date_day as date) as date_day + from days +) + +select * +from final +""" diff --git a/tests/functional/semantic_models/test_semantic_model_configs.py b/tests/functional/semantic_models/test_semantic_model_configs.py new file mode 100644 index 000000000..7d32d744d --- /dev/null +++ b/tests/functional/semantic_models/test_semantic_model_configs.py @@ -0,0 +1,227 @@ +from dbt.contracts.graph.model_config import SemanticModelConfig +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt, update_config_file +import pytest + +from tests.functional.semantic_models.fixtures import ( + disabled_models_people_metrics_yml, + disabled_semantic_model_people_yml, + enabled_semantic_model_people_yml, + groups_yml, + metricflow_time_spine_sql, + models_people_metrics_yml, + models_people_sql, + semantic_model_people_yml, +) + + +# Test disabled config at semantic_models level in yaml file +class TestConfigYamlLevel: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": disabled_semantic_model_people_yml, + "people_metrics.yml": disabled_models_people_metrics_yml, + "groups.yml": groups_yml, + } + + def test_yaml_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" not in manifest.semantic_models + assert "semantic_model.test.semantic_people" in manifest.disabled + + assert "group.test.some_group" in manifest.groups + assert "semantic_model.test.semantic_people" not in manifest.groups + + +# Test disabled config at semantic_models level with a still enabled metric +class TestDisabledConfigYamlLevelEnabledMetric: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": disabled_semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + def test_yaml_level(self, project): + with pytest.raises( + ParsingError, + match="The measure `people` is referenced on disabled semantic model `semantic_people`.", + ): + run_dbt(["parse"]) + + +# Test disabling semantic model config but not metric config in dbt_project.yml +class TestMismatchesConfigProjectLevel: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "semantic-models": { + "test": { + "enabled": True, + } + } + } + + def test_project_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" in manifest.semantic_models + assert "group.test.some_group" in manifest.groups + assert manifest.semantic_models["semantic_model.test.semantic_people"].group is None + + new_enabled_config = { + "semantic-models": { + "test": { + "enabled": False, + } + } + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + with pytest.raises( + ParsingError, + match="The measure `people` is referenced on disabled semantic model `semantic_people`.", + ): + run_dbt(["parse"]) + + +# Test disabling semantic model and metric configs in dbt_project.yml +class TestConfigProjectLevel: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "semantic-models": { + "test": { + "enabled": True, + } + }, + "metrics": { + "test": { + "enabled": True, + } + }, + } + + def test_project_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" in manifest.semantic_models + assert "group.test.some_group" in manifest.groups + assert "group.test.some_other_group" in manifest.groups + assert manifest.semantic_models["semantic_model.test.semantic_people"].group is None + + new_group_config = { + "semantic-models": { + "test": { + "group": "some_other_group", + } + }, + } + update_config_file(new_group_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "semantic_model.test.semantic_people" in manifest.semantic_models + assert "group.test.some_other_group" in manifest.groups + assert "group.test.some_group" in manifest.groups + assert ( + manifest.semantic_models["semantic_model.test.semantic_people"].group + == "some_other_group" + ) + + new_enabled_config = { + "semantic-models": { + "test": { + "enabled": False, + } + }, + "metrics": { + "test": { + "enabled": False, + } + }, + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "semantic_model.test.semantic_people" not in manifest.semantic_models + assert "semantic_model.test.semantic_people" in manifest.disabled + + assert "group.test.some_group" in manifest.groups + assert "semantic_model.test.semantic_people" not in manifest.groups + + +# Test inheritence - set configs at project and semantic_model level - expect semantic_model level to win +class TestConfigsInheritence: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": enabled_semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"semantic-models": {"enabled": False}} + + def test_project_plus_yaml_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" in manifest.semantic_models + config_test_table = manifest.semantic_models.get( + "semantic_model.test.semantic_people" + ).config + + assert isinstance(config_test_table, SemanticModelConfig) + + +# test setting meta attributes in semantic model config +class TestMetaConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": enabled_semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + def test_meta_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + sm_id = "semantic_model.test.semantic_people" + assert sm_id in manifest.semantic_models + sm_node = manifest.semantic_models[sm_id] + meta_expected = {"my_meta": "testing", "my_other_meta": "testing more"} + assert sm_node.config.meta == meta_expected diff --git a/tests/functional/semantic_models/test_semantic_model_parsing.py b/tests/functional/semantic_models/test_semantic_model_parsing.py new file mode 100644 index 000000000..c7a7cda0c --- /dev/null +++ b/tests/functional/semantic_models/test_semantic_model_parsing.py @@ -0,0 +1,148 @@ +from typing import List + +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import write_file +from dbt_common.events.base_types import BaseEvent +from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity +import pytest + +from tests.functional.dbt_runner import dbtTestRunner +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_without_semantic_model_yml, + schema_yml, +) + + +class TestSemanticModelParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + } + + def test_semantic_model_parsing(self, project): + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + manifest = result.result + assert len(manifest.semantic_models) == 1 + semantic_model = manifest.semantic_models["semantic_model.test.revenue"] + assert semantic_model.node_relation.alias == "fct_revenue" + assert ( + semantic_model.node_relation.relation_name + == f'"dbt"."{project.test_schema}"."fct_revenue"' + ) + assert len(semantic_model.measures) == 6 + # manifest should have one metric (that was created from a measure) + assert len(manifest.metrics) == 2 + metric = manifest.metrics["metric.test.txn_revenue"] + assert metric.name == "txn_revenue" + + def test_semantic_model_error(self, project): + # Next, modify the default schema.yml to remove the semantic model. + error_schema_yml = schema_yml.replace("sum_of_things", "has_revenue") + write_file(error_schema_yml, project.project_root, "models", "schema.yml") + events: List[BaseEvent] = [] + runner = dbtTestRunner(callbacks=[events.append]) + result = runner.invoke(["parse"]) + assert not result.success + + validation_errors = [e for e in events if e.info.name == "SemanticValidationFailure"] + assert validation_errors + + +class TestSemanticModelPartialParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + } + + def test_semantic_model_changed_partial_parsing(self, project): + # First, use the default schema.yml to define our semantic model, and + # run the dbt parse command + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + + # Next, modify the default schema.yml to change a detail of the semantic + # model. + modified_schema_yml = schema_yml.replace("time_granularity: day", "time_granularity: week") + write_file(modified_schema_yml, project.project_root, "models", "schema.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Finally, verify that the manifest reflects the partially parsed change + manifest = result.result + semantic_model = manifest.semantic_models["semantic_model.test.revenue"] + assert semantic_model.dimensions[0].type_params.time_granularity == TimeGranularity.WEEK + + def test_semantic_model_deleted_partial_parsing(self, project): + # First, use the default schema.yml to define our semantic model, and + # run the dbt parse command + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + assert "semantic_model.test.revenue" in result.result.semantic_models + + # Next, modify the default schema.yml to remove the semantic model. + write_file(schema_without_semantic_model_yml, project.project_root, "models", "schema.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Finally, verify that the manifest reflects the deletion + assert "semantic_model.test.revenue" not in result.result.semantic_models + + def test_semantic_model_flipping_create_metric_partial_parsing(self, project): + generated_metric = "metric.test.txn_revenue" + # First, use the default schema.yml to define our semantic model, and + # run the dbt parse command + write_file(schema_yml, project.project_root, "models", "schema.yml") + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + + # Verify the metric created by `create_metric: true` exists + metric = result.result.metrics[generated_metric] + assert metric.name == "txn_revenue" + + # --- Next, modify the default schema.yml to have no `create_metric: true` --- + no_create_metric_schema_yml = schema_yml.replace( + "create_metric: true", "create_metric: false" + ) + write_file(no_create_metric_schema_yml, project.project_root, "models", "schema.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Verify the metric originally created by `create_metric: true` was removed + assert result.result.metrics.get(generated_metric) is None + + # Verify that partial parsing didn't clobber the normal metric + assert result.result.metrics.get("metric.test.simple_metric") is not None + + # --- Now bring it back --- + create_metric_schema_yml = schema_yml.replace( + "create_metric: false", "create_metric: true" + ) + write_file(create_metric_schema_yml, project.project_root, "models", "schema.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Verify the metric originally created by `create_metric: true` was removed + metric = result.result.metrics[generated_metric] + assert metric.name == "txn_revenue" diff --git a/tests/functional/semantic_models/test_semantic_models.py b/tests/functional/semantic_models/test_semantic_models.py new file mode 100644 index 000000000..d984d0886 --- /dev/null +++ b/tests/functional/semantic_models/test_semantic_models.py @@ -0,0 +1,98 @@ +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import run_dbt, write_file +from dbt_common.exceptions import CompilationError +import pytest + +from tests.functional.semantic_models.fixtures import ( + models_people_metrics_yml, + models_people_sql, + semantic_model_descriptions, + semantic_model_people_diff_name_yml, + semantic_model_people_yml, + semantic_model_people_yml_with_docs, + simple_metricflow_time_spine_sql, +) + + +class TestSemanticModelDependsOn: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + } + + def test_depends_on(self, project): + manifest = run_dbt(["parse"]) + assert isinstance(manifest, Manifest) + + expected_depends_on_for_people_semantic_model = ["model.test.people"] + + number_of_people_metric = manifest.semantic_models["semantic_model.test.semantic_people"] + assert ( + number_of_people_metric.depends_on.nodes + == expected_depends_on_for_people_semantic_model + ) + + +class TestSemanticModelNestedDocs: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml_with_docs, + "people_metrics.yml": models_people_metrics_yml, + "docs.md": semantic_model_descriptions, + } + + def test_depends_on(self, project): + manifest = run_dbt(["parse"]) + node = manifest.semantic_models["semantic_model.test.semantic_people"] + + assert node.description == "foo" + assert node.dimensions[0].description == "bar" + assert node.measures[0].description == "baz" + assert node.entities[0].description == "qux" + + +class TestSemanticModelUnknownModel: + @pytest.fixture(scope="class") + def models(self): + return { + "not_people.sql": models_people_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + } + + def test_unknown_model_raises_issue(self, project): + with pytest.raises(CompilationError) as excinfo: + run_dbt(["parse"]) + assert "depends on a node named 'people' which was not found" in str(excinfo.value) + + +class TestSemanticModelPartialParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + } + + def test_semantic_model_deleted_partial_parsing(self, project): + # First, use the default saved_queries.yml to define our saved_query, and + # run the dbt parse command + run_dbt(["parse"]) + # Next, modify the default semantic_models.yml to remove the saved query. + write_file( + semantic_model_people_diff_name_yml, + project.project_root, + "models", + "semantic_models.yml", + ) + run_dbt(["compile"]) diff --git a/tests/functional/show/fixtures.py b/tests/functional/show/fixtures.py new file mode 100644 index 000000000..1fc9b9fd7 --- /dev/null +++ b/tests/functional/show/fixtures.py @@ -0,0 +1,109 @@ +models__sample_model = """ +select * from {{ ref('sample_seed') }} +""" + +models__sample_number_model = """ +select + cast(1.0 as int) as float_to_int_field, + 3.0 as float_field, + 4.3 as float_with_dec_field, + 5 as int_field +""" + +models__sample_number_model_with_nulls = """ +select + cast(1.0 as int) as float_to_int_field, + 3.0 as float_field, + 4.3 as float_with_dec_field, + 5 as int_field + +union all + +select + cast(null as int) as float_to_int_field, + cast(null as float) as float_field, + cast(null as float) as float_with_dec_field, + cast(null as int) as int_field + +""" + +models__second_model = """ +select + sample_num as col_one, + sample_bool as col_two, + 42 as answer +from {{ ref('sample_model') }} +""" + +models__sql_header = """ +{% call set_sql_header(config) %} +set session time zone '{{ var("timezone", "Europe/Paris") }}'; +{%- endcall %} +select current_setting('timezone') as timezone +""" + +private_model_yml = """ +groups: + - name: my_cool_group + owner: {name: me} + +models: + - name: private_model + access: private + config: + group: my_cool_group +""" + + +schema_yml = """ +models: + - name: sample_model + latest_version: 1 + + # declare the versions, and fully specify them + versions: + - v: 2 + config: + materialized: table + columns: + - name: sample_num + data_type: int + - name: sample_bool + data_type: bool + - name: answer + data_type: int + + - v: 1 + config: + materialized: table + contract: {enforced: true} + columns: + - name: sample_num + data_type: int + - name: sample_bool + data_type: bool +""" + +models__ephemeral_model = """ +{{ config(materialized = 'ephemeral') }} +select + coalesce(sample_num, 0) + 10 as col_deci +from {{ ref('sample_model') }} +""" + +models__second_ephemeral_model = """ +{{ config(materialized = 'ephemeral') }} +select + col_deci + 100 as col_hundo +from {{ ref('ephemeral_model') }} +""" + +seeds__sample_seed = """sample_num,sample_bool +1,true +2,false +3,true +4,false +5,true +6,false +7,true +""" diff --git a/tests/functional/show/test_show.py b/tests/functional/show/test_show.py new file mode 100644 index 000000000..5eb711270 --- /dev/null +++ b/tests/functional/show/test_show.py @@ -0,0 +1,194 @@ +from dbt.tests.util import run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtBaseException, DbtRuntimeError +import pytest + +from tests.functional.show.fixtures import ( + models__ephemeral_model, + models__sample_model, + models__sample_number_model, + models__sample_number_model_with_nulls, + models__second_ephemeral_model, + models__second_model, + private_model_yml, + schema_yml, + seeds__sample_seed, +) + + +class ShowBase: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + "sample_number_model.sql": models__sample_number_model, + "sample_number_model_with_nulls.sql": models__sample_number_model_with_nulls, + "second_model.sql": models__second_model, + "ephemeral_model.sql": models__ephemeral_model, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"sample_seed.csv": seeds__sample_seed} + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + run_dbt(["seed"]) + + +class TestShowNone(ShowBase): + def test_none(self, project): + with pytest.raises( + DbtRuntimeError, match="Either --select or --inline must be passed to show" + ): + run_dbt(["show"]) + + +class TestShowSelectText(ShowBase): + def test_select_model_text(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture(["show", "--select", "second_model"]) + assert "Previewing node 'sample_model'" not in log_output + assert "Previewing node 'second_model'" in log_output + assert "col_one" in log_output + assert "col_two" in log_output + assert "answer" in log_output + + +class TestShowMultiple(ShowBase): + def test_select_multiple_model_text(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture(["show", "--select", "sample_model second_model"]) + assert "Previewing node 'sample_model'" in log_output + assert "sample_num" in log_output + assert "sample_bool" in log_output + + +class TestShowSingle(ShowBase): + def test_select_single_model_json(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture( + ["show", "--select", "sample_model", "--output", "json"] + ) + assert "Previewing node 'sample_model'" not in log_output + assert "sample_num" in log_output + assert "sample_bool" in log_output + + +class TestShowNumeric(ShowBase): + def test_numeric_values(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture( + ["show", "--select", "sample_number_model", "--output", "json"] + ) + # json log output needs the escapes removed for string matching + log_output = log_output.replace("\\", "") + assert "Previewing node 'sample_number_model'" not in log_output + assert '"float_to_int_field": 1.0' not in log_output + assert '"float_to_int_field": 1' in log_output + assert '"float_field": 3.0' in log_output + assert '"float_with_dec_field": 4.3' in log_output + assert '"int_field": 5' in log_output + assert '"int_field": 5.0' not in log_output + + +class TestShowNumericNulls(ShowBase): + def test_numeric_values_with_nulls(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture( + ["show", "--select", "sample_number_model_with_nulls", "--output", "json"] + ) + # json log output needs the escapes removed for string matching + log_output = log_output.replace("\\", "") + assert "Previewing node 'sample_number_model_with_nulls'" not in log_output + assert '"float_to_int_field": 1.0' not in log_output + assert '"float_to_int_field": 1' in log_output + assert '"float_field": 3.0' in log_output + assert '"float_with_dec_field": 4.3' in log_output + assert '"int_field": 5' in log_output + assert '"int_field": 5.0' not in log_output + + +class TestShowInline(ShowBase): + def test_inline_pass(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture( + ["show", "--inline", "select * from {{ ref('sample_model') }}"] + ) + assert "Previewing inline node" in log_output + assert "sample_num" in log_output + assert "sample_bool" in log_output + + +class TestShowInlineFail(ShowBase): + def test_inline_fail(self, project): + with pytest.raises(DbtBaseException, match="Error parsing inline query"): + run_dbt(["show", "--inline", "select * from {{ ref('third_model') }}"]) + + +class TestShowInlineFailDB(ShowBase): + def test_inline_fail_database_error(self, project): + with pytest.raises(DbtRuntimeError, match="Database Error"): + run_dbt(["show", "--inline", "slect asdlkjfsld;j"]) + + +class TestShowEphemeral(ShowBase): + def test_ephemeral_model(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"]) + assert "col_deci" in log_output + + +class TestShowSecondEphemeral(ShowBase): + def test_second_ephemeral_model(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture(["show", "--inline", models__second_ephemeral_model]) + assert "col_hundo" in log_output + + +class TestShowSeed(ShowBase): + def test_seed(self, project): + (_, log_output) = run_dbt_and_capture(["show", "--select", "sample_seed"]) + assert "Previewing node 'sample_seed'" in log_output + + +class TestShowModelVersions: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": schema_yml, + "sample_model.sql": models__sample_model, + "sample_model_v2.sql": models__second_model, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"sample_seed.csv": seeds__sample_seed} + + def test_version_unspecified(self, project): + run_dbt(["build"]) + (results, log_output) = run_dbt_and_capture(["show", "--select", "sample_model"]) + assert "Previewing node 'sample_model.v1'" in log_output + assert "Previewing node 'sample_model.v2'" in log_output + + def test_none(self, project): + run_dbt(["build"]) + (results, log_output) = run_dbt_and_capture(["show", "--select", "sample_model.v2"]) + assert "Previewing node 'sample_model.v1'" not in log_output + assert "Previewing node 'sample_model.v2'" in log_output + + +class TestShowPrivateModel: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": private_model_yml, + "private_model.sql": models__sample_model, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"sample_seed.csv": seeds__sample_seed} + + def test_version_unspecified(self, project): + run_dbt(["build"]) + run_dbt(["show", "--inline", "select * from {{ ref('private_model') }}"]) diff --git a/tests/functional/simple_snapshot/data/invalidate_postgres.sql b/tests/functional/simple_snapshot/data/invalidate_postgres.sql new file mode 100644 index 000000000..b0bef3c6c --- /dev/null +++ b/tests/functional/simple_snapshot/data/invalidate_postgres.sql @@ -0,0 +1,27 @@ + +-- update records 11 - 21. Change email and updated_at field +update {schema}.seed set + updated_at = updated_at + interval '1 hour', + email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end +where id >= 10 and id <= 20; + + +-- invalidate records 11 - 21 +update {schema}.snapshot_expected set + dbt_valid_to = updated_at + interval '1 hour' +where id >= 10 and id <= 20; + + +update {schema}.snapshot_castillo_expected set + dbt_valid_to = "1-updated_at" + interval '1 hour' +where id >= 10 and id <= 20; + + +update {schema}.snapshot_alvarez_expected set + dbt_valid_to = updated_at + interval '1 hour' +where id >= 10 and id <= 20; + + +update {schema}.snapshot_kelly_expected set + dbt_valid_to = updated_at + interval '1 hour' +where id >= 10 and id <= 20; diff --git a/tests/functional/simple_snapshot/data/seed_pg.sql b/tests/functional/simple_snapshot/data/seed_pg.sql new file mode 100644 index 000000000..a22a2359c --- /dev/null +++ b/tests/functional/simple_snapshot/data/seed_pg.sql @@ -0,0 +1,223 @@ + create table {database}.{schema}.seed ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + +create table {database}.{schema}.snapshot_expected ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + + -- snapshotting fields + updated_at TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, + dbt_scd_id TEXT, + dbt_updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +-- seed inserts +-- use the same email for two users to verify that duplicated check_cols values +-- are handled appropriately +insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values +(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), +(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), +(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), +(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), +(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), +(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), +(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), +(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), +(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), +(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), +(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), +(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), +(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), +(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), +(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), +(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), +(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), +(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), +(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), +(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); + + +-- populate snapshot table +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed; + + + +create table {database}.{schema}.snapshot_castillo_expected ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + + -- snapshotting fields + "1-updated_at" TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, + dbt_scd_id TEXT, + dbt_updated_at TIMESTAMP WITHOUT TIME ZONE +); + +-- one entry +insert into {database}.{schema}.snapshot_castillo_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + "1-updated_at", + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed where last_name = 'Castillo'; + +create table {database}.{schema}.snapshot_alvarez_expected ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + + -- snapshotting fields + updated_at TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, + dbt_scd_id TEXT, + dbt_updated_at TIMESTAMP WITHOUT TIME ZONE +); + +-- 0 entries +insert into {database}.{schema}.snapshot_alvarez_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed where last_name = 'Alvarez'; + +create table {database}.{schema}.snapshot_kelly_expected ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + + -- snapshotting fields + updated_at TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, + dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, + dbt_scd_id TEXT, + dbt_updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +-- 2 entries +insert into {database}.{schema}.snapshot_kelly_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed where last_name = 'Kelly'; diff --git a/tests/functional/simple_snapshot/data/shared_macros.sql b/tests/functional/simple_snapshot/data/shared_macros.sql new file mode 100644 index 000000000..9bdfdd264 --- /dev/null +++ b/tests/functional/simple_snapshot/data/shared_macros.sql @@ -0,0 +1,80 @@ +{% macro get_snapshot_unique_id() -%} + {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} +{%- endmacro %} + +{% macro default__get_snapshot_unique_id() -%} + {% do return("id || '-' || first_name") %} +{%- endmacro %} + +{# + mostly copy+pasted from dbt_utils, but I removed some parameters and added + a query that calls get_snapshot_unique_id +#} +{% test mutually_exclusive_ranges(model) %} + +with base as ( + select {{ get_snapshot_unique_id() }} as dbt_unique_id, + * + from {{ model }} +), +window_functions as ( + + select + dbt_valid_from as lower_bound, + coalesce(dbt_valid_to, '2099-1-1T00:00:01') as upper_bound, + + lead(dbt_valid_from) over ( + partition by dbt_unique_id + order by dbt_valid_from + ) as next_lower_bound, + + row_number() over ( + partition by dbt_unique_id + order by dbt_valid_from desc + ) = 1 as is_last_record + + from base + +), + +calc as ( + -- We want to return records where one of our assumptions fails, so we'll use + -- the `not` function with `and` statements so we can write our assumptions nore cleanly + select + *, + + -- For each record: lower_bound should be < upper_bound. + -- Coalesce it to return an error on the null case (implicit assumption + -- these columns are not_null) + coalesce( + lower_bound < upper_bound, + is_last_record + ) as lower_bound_less_than_upper_bound, + + -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound. + -- Coalesce it to handle null cases for the last record. + coalesce( + upper_bound = next_lower_bound, + is_last_record, + false + ) as upper_bound_equal_to_next_lower_bound + + from window_functions + +), + +validation_errors as ( + + select + * + from calc + + where not( + -- THE FOLLOWING SHOULD BE TRUE -- + lower_bound_less_than_upper_bound + and upper_bound_equal_to_next_lower_bound + ) +) + +select * from validation_errors +{% endtest %} diff --git a/tests/functional/simple_snapshot/data/update.sql b/tests/functional/simple_snapshot/data/update.sql new file mode 100644 index 000000000..890959f32 --- /dev/null +++ b/tests/functional/simple_snapshot/data/update.sql @@ -0,0 +1,261 @@ +-- insert v2 of the 11 - 21 records + +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id >= 10 and id <= 20; + + +insert into {database}.{schema}.snapshot_castillo_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + "1-updated_at", + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id >= 10 and id <= 20 and last_name = 'Castillo'; + + +insert into {database}.{schema}.snapshot_alvarez_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id >= 10 and id <= 20 and last_name = 'Alvarez'; + + +insert into {database}.{schema}.snapshot_kelly_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id >= 10 and id <= 20 and last_name = 'Kelly'; + +-- insert 10 new records +insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values +(21, 'Judy', 'Robinson', 'jrobinsonk@blogs.com', 'Female', '208.21.192.232', '2016-09-18 08:27:38'), +(22, 'Kevin', 'Alvarez', 'kalvarezl@buzzfeed.com', 'Male', '228.106.146.9', '2016-07-29 03:07:37'), +(23, 'Barbara', 'Carr', 'bcarrm@pen.io', 'Female', '106.165.140.17', '2015-09-24 13:27:23'), +(24, 'William', 'Watkins', 'wwatkinsn@guardian.co.uk', 'Male', '78.155.84.6', '2016-03-08 19:13:08'), +(25, 'Judy', 'Cooper', 'jcoopero@google.com.au', 'Female', '24.149.123.184', '2016-10-05 20:49:33'), +(26, 'Shirley', 'Castillo', 'scastillop@samsung.com', 'Female', '129.252.181.12', '2016-06-20 21:12:21'), +(27, 'Justin', 'Harper', 'jharperq@opera.com', 'Male', '131.172.103.218', '2016-05-21 22:56:46'), +(28, 'Marie', 'Medina', 'mmedinar@nhs.uk', 'Female', '188.119.125.67', '2015-10-08 13:44:33'), +(29, 'Kelly', 'Edwards', 'kedwardss@phoca.cz', 'Female', '47.121.157.66', '2015-09-15 06:33:37'), +(30, 'Carl', 'Coleman', 'ccolemant@wikipedia.org', 'Male', '82.227.154.83', '2016-05-26 16:46:40'); + + +-- add these new records to the snapshot table +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id > 20; + + +-- add these new records to the snapshot table +insert into {database}.{schema}.snapshot_castillo_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + "1-updated_at", + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id > 20 and last_name = 'Castillo'; + +insert into {database}.{schema}.snapshot_alvarez_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id > 20 and last_name = 'Alvarez'; + +insert into {database}.{schema}.snapshot_kelly_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + dbt_valid_from, + dbt_valid_to, + dbt_updated_at, + dbt_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as dbt_valid_from, + null::timestamp as dbt_valid_to, + updated_at as dbt_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id +from {database}.{schema}.seed +where id > 20 and last_name = 'Kelly'; diff --git a/tests/functional/simple_snapshot/fixtures.py b/tests/functional/simple_snapshot/fixtures.py new file mode 100644 index 000000000..04e4905d4 --- /dev/null +++ b/tests/functional/simple_snapshot/fixtures.py @@ -0,0 +1,389 @@ +snapshots_select__snapshot_sql = """ +{% snapshot snapshot_castillo %} + + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='timestamp', + updated_at='"1-updated_at"', + ) + }} + select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' + +{% endsnapshot %} + +{% snapshot snapshot_alvarez %} + + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' + +{% endsnapshot %} + + +{% snapshot snapshot_kelly %} + {# This has no target_database set, which is allowed! #} + {{ + config( + target_schema=schema, + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' + +{% endsnapshot %} +""" + +snapshots_pg_custom__snapshot_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + target_database=var('target_database', database), + target_schema=var('target_schema', schema), + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='custom', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + + +macros_custom_snapshot__custom_sql = """ +{# A "custom" strategy that's really just the timestamp one #} +{% macro snapshot_custom_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} + {% set primary_key = config['unique_key'] %} + {% set updated_at = config['updated_at'] %} + + {% set row_changed_expr -%} + ({{ snapshotted_rel }}.{{ updated_at }} < {{ current_rel }}.{{ updated_at }}) + {%- endset %} + + {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} + + {% do return({ + "unique_key": primary_key, + "updated_at": updated_at, + "row_changed": row_changed_expr, + "scd_id": scd_id_expr + }) %} +{% endmacro %} +""" + + +models__schema_yml = """ +version: 2 +snapshots: + - name: snapshot_actual + data_tests: + - mutually_exclusive_ranges + config: + meta: + owner: 'a_owner' +""" + +models__schema_with_target_schema_yml = """ +version: 2 +snapshots: + - name: snapshot_actual + data_tests: + - mutually_exclusive_ranges + config: + meta: + owner: 'a_owner' + target_schema: schema_from_schema_yml +""" + +models__ref_snapshot_sql = """ +select * from {{ ref('snapshot_actual') }} +""" + +macros__test_no_overlaps_sql = """ +{% macro get_snapshot_unique_id() -%} + {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} +{%- endmacro %} + +{% macro default__get_snapshot_unique_id() -%} + {% do return("id || '-' || first_name") %} +{%- endmacro %} + +{# + mostly copy+pasted from dbt_utils, but I removed some parameters and added + a query that calls get_snapshot_unique_id +#} +{% test mutually_exclusive_ranges(model) %} + +with base as ( + select {{ get_snapshot_unique_id() }} as dbt_unique_id, + * + from {{ model }} +), +window_functions as ( + + select + dbt_valid_from as lower_bound, + coalesce(dbt_valid_to, '2099-1-1T00:00:01') as upper_bound, + + lead(dbt_valid_from) over ( + partition by dbt_unique_id + order by dbt_valid_from + ) as next_lower_bound, + + row_number() over ( + partition by dbt_unique_id + order by dbt_valid_from desc + ) = 1 as is_last_record + + from base + +), + +calc as ( + -- We want to return records where one of our assumptions fails, so we'll use + -- the `not` function with `and` statements so we can write our assumptions nore cleanly + select + *, + + -- For each record: lower_bound should be < upper_bound. + -- Coalesce it to return an error on the null case (implicit assumption + -- these columns are not_null) + coalesce( + lower_bound < upper_bound, + is_last_record + ) as lower_bound_less_than_upper_bound, + + -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound. + -- Coalesce it to handle null cases for the last record. + coalesce( + upper_bound = next_lower_bound, + is_last_record, + false + ) as upper_bound_equal_to_next_lower_bound + + from window_functions + +), + +validation_errors as ( + + select + * + from calc + + where not( + -- THE FOLLOWING SHOULD BE TRUE -- + lower_bound_less_than_upper_bound + and upper_bound_equal_to_next_lower_bound + ) +) + +select * from validation_errors +{% endtest %} +""" + + +snapshots_select_noconfig__snapshot_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + target_database=var('target_database', database), + target_schema=var('target_schema', schema), + ) + }} + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} + +{% snapshot snapshot_castillo %} + + {{ + config( + target_database=var('target_database', database), + updated_at='"1-updated_at"', + ) + }} + select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' + +{% endsnapshot %} + +{% snapshot snapshot_alvarez %} + + {{ + config( + target_database=var('target_database', database), + ) + }} + select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' + +{% endsnapshot %} + + +{% snapshot snapshot_kelly %} + {# This has no target_database set, which is allowed! #} + select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' + +{% endsnapshot %} +""" + + +seeds__seed_newcol_csv = """id,first_name,last_name +1,Judith,Kennedy +2,Arthur,Kelly +3,Rachel,Moreno +""" + +seeds__seed_csv = """id,first_name +1,Judith +2,Arthur +3,Rachel +""" + + +snapshots_pg_custom_namespaced__snapshot_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + target_database=var('target_database', database), + target_schema=var('target_schema', schema), + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='test.custom', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + +snapshots_pg__snapshot_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + target_database=var('target_database', database), + target_schema=var('target_schema', schema), + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='timestamp', + updated_at='updated_at', + ) + }} + + {% if var('invalidate_hard_deletes', 'false') | as_bool %} + {{ config(invalidate_hard_deletes=True) }} + {% endif %} + + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + +snapshots_pg__snapshot_no_target_schema_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + target_database=var('target_database', database), + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='timestamp', + updated_at='updated_at', + ) + }} + + {% if var('invalidate_hard_deletes', 'false') | as_bool %} + {{ config(invalidate_hard_deletes=True) }} + {% endif %} + + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + +models_slow__gen_sql = """ + +{{ config(materialized='ephemeral') }} + + +/* + Generates 50 rows that "appear" to update every + second to a query-er. + + 1 2020-04-21 20:44:00-04 0 + 2 2020-04-21 20:43:59-04 59 + 3 2020-04-21 20:43:58-04 58 + 4 2020-04-21 20:43:57-04 57 + + .... 1 second later .... + + 1 2020-04-21 20:44:01-04 1 + 2 2020-04-21 20:44:00-04 0 + 3 2020-04-21 20:43:59-04 59 + 4 2020-04-21 20:43:58-04 58 + + This view uses pg_sleep(2) to make queries against + the view take a non-trivial amount of time + + Use statement_timestamp() as it changes during a transactions. + If we used now() or current_time or similar, then the timestamp + of the start of the transaction would be returned instead. +*/ + +with gen as ( + + select + id, + date_trunc('second', statement_timestamp()) - (interval '1 second' * id) as updated_at + + from generate_series(1, 10) id + +) + +select + id, + updated_at, + extract(seconds from updated_at)::int as seconds + +from gen, pg_sleep(2) +""" + +snapshots_longtext__snapshot_sql = """ +{% snapshot snapshot_actual %} + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{schema}}.super_long +{% endsnapshot %} +""" + +snapshots_check_col_noconfig__snapshot_sql = """ +{% snapshot snapshot_actual %} + select * from {{target.database}}.{{schema}}.seed +{% endsnapshot %} + +{# This should be exactly the same #} +{% snapshot snapshot_checkall %} + {{ config(check_cols='all') }} + select * from {{target.database}}.{{schema}}.seed +{% endsnapshot %} +""" diff --git a/tests/functional/simple_snapshot/test_basic_snapshot.py b/tests/functional/simple_snapshot/test_basic_snapshot.py new file mode 100644 index 000000000..6165e8e18 --- /dev/null +++ b/tests/functional/simple_snapshot/test_basic_snapshot.py @@ -0,0 +1,373 @@ +from datetime import datetime +import os + +from dbt.tests.util import ( + check_relations_equal, + relation_from_name, + run_dbt, + write_file, +) +import pytest +import pytz + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + macros_custom_snapshot__custom_sql, + models__ref_snapshot_sql, + models__schema_with_target_schema_yml, + models__schema_yml, + seeds__seed_csv, + seeds__seed_newcol_csv, + snapshots_pg__snapshot_no_target_schema_sql, + snapshots_pg__snapshot_sql, + snapshots_pg_custom__snapshot_sql, + snapshots_pg_custom_namespaced__snapshot_sql, +) + + +snapshots_check_col__snapshot_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='check', + check_cols=['email'], + ) + }} + select * from {{target.database}}.{{schema}}.seed + +{% endsnapshot %} + +{# This should be exactly the same #} +{% snapshot snapshot_checkall %} + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='check', + check_cols='all', + ) + }} + select * from {{target.database}}.{{schema}}.seed +{% endsnapshot %} +""" + + +snapshots_check_col_noconfig__snapshot_sql = """ +{% snapshot snapshot_actual %} + select * from {{target.database}}.{{schema}}.seed +{% endsnapshot %} + +{# This should be exactly the same #} +{% snapshot snapshot_checkall %} + {{ config(check_cols='all') }} + select * from {{target.database}}.{{schema}}.seed +{% endsnapshot %} +""" + + +def snapshot_setup(project, num_snapshot_models=1): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot"]) + assert len(results) == num_snapshot_models + + run_dbt(["test"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + path = os.path.join(project.test_data_dir, "invalidate_postgres.sql") + project.run_sql_file(path) + + path = os.path.join(project.test_data_dir, "update.sql") + project.run_sql_file(path) + + results = run_dbt(["snapshot"]) + assert len(results) == num_snapshot_models + + run_dbt(["test"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + +def ref_setup(project, num_snapshot_models=1): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot"]) + assert len(results) == num_snapshot_models + + results = run_dbt(["run"]) + assert len(results) == 1 + + +class Basic: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_pg__snapshot_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + +class TestBasicSnapshot(Basic): + def test_basic_snapshot(self, project): + snapshot_setup(project, num_snapshot_models=1) + + +class TestBasicRef(Basic): + def test_basic_ref(self, project): + ref_setup(project, num_snapshot_models=1) + + +class TestBasicTargetSchemaConfig(Basic): + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_pg__snapshot_no_target_schema_sql} + + @pytest.fixture(scope="class") + def project_config_update(self, unique_schema): + return { + "snapshots": { + "test": { + "target_schema": unique_schema + "_alt", + } + } + } + + def test_target_schema(self, project): + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 5 + # ensure that the schema in the snapshot node is the same as target_schema + snapshot_id = "snapshot.test.snapshot_actual" + snapshot_node = manifest.nodes[snapshot_id] + assert snapshot_node.schema == f"{project.test_schema}_alt" + assert ( + snapshot_node.relation_name + == f'"{project.database}"."{project.test_schema}_alt"."snapshot_actual"' + ) + assert snapshot_node.meta == {"owner": "a_owner"} + + # write out schema.yml file and check again + write_file(models__schema_with_target_schema_yml, "models", "schema.yml") + manifest = run_dbt(["parse"]) + snapshot_node = manifest.nodes[snapshot_id] + assert snapshot_node.schema == "schema_from_schema_yml" + + +class CustomNamespace: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_pg_custom_namespaced__snapshot_sql} + + @pytest.fixture(scope="class") + def macros(self): + return { + "test_no_overlaps.sql": macros__test_no_overlaps_sql, + "custom.sql": macros_custom_snapshot__custom_sql, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + +class TestBasicCustomNamespace(CustomNamespace): + def test_custom_namespace_snapshot(self, project): + snapshot_setup(project, num_snapshot_models=1) + + +class TestRefCustomNamespace(CustomNamespace): + def test_custom_namespace_ref(self, project): + ref_setup(project, num_snapshot_models=1) + + +class CustomSnapshot: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_pg_custom__snapshot_sql} + + @pytest.fixture(scope="class") + def macros(self): + return { + "test_no_overlaps.sql": macros__test_no_overlaps_sql, + "custom.sql": macros_custom_snapshot__custom_sql, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + +class TestBasicCustomSnapshot(CustomSnapshot): + def test_custom_snapshot(self, project): + snapshot_setup(project, num_snapshot_models=1) + + +class TestRefCustomSnapshot(CustomSnapshot): + def test_custom_ref(self, project): + ref_setup(project, num_snapshot_models=1) + + +class CheckCols: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_check_col__snapshot_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + +class TestBasicCheckCols(CheckCols): + def test_basic_snapshot(self, project): + snapshot_setup(project, num_snapshot_models=2) + + +class TestRefCheckCols(CheckCols): + def test_check_cols_ref(self, project): + ref_setup(project, num_snapshot_models=2) + + +class ConfiguredCheckCols: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_check_col_noconfig__snapshot_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + snapshot_config = { + "snapshots": { + "test": { + "target_schema": "{{ target.schema }}", + "unique_key": "id || '-' || first_name", + "strategy": "check", + "check_cols": ["email"], + } + } + } + return snapshot_config + + +class TestBasicConfiguredCheckCols(ConfiguredCheckCols): + def test_configured_snapshot(self, project): + snapshot_setup(project, num_snapshot_models=2) + + +class TestRefConfiguredCheckCols(ConfiguredCheckCols): + def test_configured_ref(self, project): + ref_setup(project, num_snapshot_models=2) + + +class UpdatedAtCheckCols: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_check_col_noconfig__snapshot_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + snapshot_config = { + "snapshots": { + "test": { + "target_schema": "{{ target.schema }}", + "unique_key": "id || '-' || first_name", + "strategy": "check", + "check_cols": "all", + "updated_at": "updated_at", + } + } + } + return snapshot_config + + +class TestBasicUpdatedAtCheckCols(UpdatedAtCheckCols): + def test_updated_at_snapshot(self, project): + snapshot_setup(project, num_snapshot_models=2) + + snapshot_expected_relation = relation_from_name(project.adapter, "snapshot_expected") + revived_records = project.run_sql( + """ + select id, updated_at, dbt_valid_from from {} + """.format( + snapshot_expected_relation + ), + fetch="all", + ) + for result in revived_records: + # result is a tuple, the updated_at is second and dbt_valid_from is latest + assert isinstance(result[1], datetime) + assert isinstance(result[2], datetime) + assert result[1].replace(tzinfo=pytz.UTC) == result[2].replace(tzinfo=pytz.UTC) + + +class TestRefUpdatedAtCheckCols(UpdatedAtCheckCols): + def test_updated_at_ref(self, project): + ref_setup(project, num_snapshot_models=2) diff --git a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py new file mode 100644 index 000000000..d5333536f --- /dev/null +++ b/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py @@ -0,0 +1,127 @@ +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + + +snapshot_sql = """ +{% snapshot snapshot_check_cols_new_column %} + {{ + config( + target_database=database, + target_schema=schema, + strategy='check', + unique_key='id', + check_cols=var("check_cols", ['name']), + updated_at="'" ~ var("updated_at") ~ "'::timestamp", + ) + }} + + {% if var('version') == 1 %} + + select 1 as id, 'foo' as name + + {% else %} + + select 1 as id, 'foo' as name, 'bar' as other + + {% endif %} + +{% endsnapshot %} +""" + +expected_csv = """ +id,name,other,dbt_scd_id,dbt_updated_at,dbt_valid_from,dbt_valid_to +1,foo,NULL,0d73ad1b216ad884c9f7395d799c912c,2016-07-01 00:00:00.000,2016-07-01 00:00:00.000,2016-07-02 00:00:00.000 +1,foo,bar,7df3783934a6a707d51254859260b9ff,2016-07-02 00:00:00.000,2016-07-02 00:00:00.000, +""".lstrip() + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot_check_cols_new_column.sql": snapshot_sql} + + +@pytest.fixture(scope="class") +def seeds(): + return {"snapshot_check_cols_new_column_expected.csv": expected_csv} + + +@pytest.fixture(scope="class") +def project_config_update(): + return { + "seeds": { + "quote_columns": False, + "test": { + "snapshot_check_cols_new_column_expected": { + "+column_types": { + "dbt_updated_at": "timestamp without time zone", + "dbt_valid_from": "timestamp without time zone", + "dbt_valid_to": "timestamp without time zone", + }, + }, + }, + }, + } + + +def run_check_cols_snapshot_with_schema_change(project, check_cols_override=None): + """ + Test that snapshots using the "check" strategy and explicit check_cols support adding columns. + + Approach: + 1. Take a snapshot that checks a single non-id column + 2. Add a new column to the data + 3. Take a snapshot that checks the new non-id column too + + As long as no error is thrown, then the snapshot was successful + """ + + check_cols = check_cols_override or ["name", "other"] + + # 1. Create a table that represents the expected data after a series of snapshots + vars_dict = {"version": 1, "updated_at": "2016-07-01"} + results = run_dbt(["seed", "--show", "--vars", str(vars_dict)]) + assert len(results) == 1 + + # Snapshot 1 + # Use only 'name' for check_cols + vars_dict = {"version": 1, "check_cols": [check_cols[0]], "updated_at": "2016-07-01"} + results = run_dbt(["snapshot", "--vars", str(vars_dict)]) + assert len(results) == 1 + + # Snapshot 2 + # Use both 'name' and 'other' for check_cols + vars_dict = {"version": 2, "check_cols": check_cols, "updated_at": "2016-07-02"} + results = run_dbt(["snapshot", "--vars", str(vars_dict)]) + assert len(results) == 1 + + check_relations_equal( + project.adapter, + ["snapshot_check_cols_new_column", "snapshot_check_cols_new_column_expected"], + compare_snapshot_cols=True, + ) + + # Snapshot 3 + # Run it again. Nothing has changed — ensure we don't detect changes + vars_dict = {"version": 2, "check_cols": check_cols, "updated_at": "2016-07-02"} + results = run_dbt(["snapshot", "--vars", str(vars_dict)]) + assert len(results) == 1 + + check_relations_equal( + project.adapter, + ["snapshot_check_cols_new_column", "snapshot_check_cols_new_column_expected"], + compare_snapshot_cols=True, + ) + + +def test_check_cols_snapshot_with_schema_change(project): + run_check_cols_snapshot_with_schema_change(project) + + +def test_check_cols_snapshot_with_schema_change_and_mismatched_casing(project): + """ + Test that this still works if the database-stored version of 'name' + 'other' + differs from the user-configured 'NAME' and 'OTHER' + """ + run_check_cols_snapshot_with_schema_change( + project=project, check_cols_override=["NAME", "OTHER"] + ) diff --git a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py b/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py new file mode 100644 index 000000000..5540eee5e --- /dev/null +++ b/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py @@ -0,0 +1,128 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql + + +test_snapshots_changing_strategy__test_snapshot_sql = """ + +{# /* + Given the repro case for the snapshot build, we'd + expect to see both records have color='pink' + in their most recent rows. +*/ #} + +with expected as ( + + select 1 as id, 'pink' as color union all + select 2 as id, 'pink' as color + +), + +actual as ( + + select id, color + from {{ ref('my_snapshot') }} + where color = 'pink' + and dbt_valid_to is null + +) + +select * from expected +except +select * from actual + +union all + +select * from actual +except +select * from expected +""" + + +snapshots_changing_strategy__snapshot_sql = """ + +{# + REPRO: + 1. Run with check strategy + 2. Add a new ts column and run with check strategy + 3. Run with timestamp strategy on new ts column + + Expect: new entry is added for changed rows in (3) +#} + + +{% snapshot my_snapshot %} + + {#--------------- Configuration ------------ #} + + {{ config( + target_schema=schema, + unique_key='id' + ) }} + + {% if var('strategy') == 'timestamp' %} + {{ config(strategy='timestamp', updated_at='updated_at') }} + {% else %} + {{ config(strategy='check', check_cols=['color']) }} + {% endif %} + + {#--------------- Test setup ------------ #} + + {% if var('step') == 1 %} + + select 1 as id, 'blue' as color + union all + select 2 as id, 'red' as color + + {% elif var('step') == 2 %} + + -- change id=1 color from blue to green + -- id=2 is unchanged when using the check strategy + select 1 as id, 'green' as color, '2020-01-01'::date as updated_at + union all + select 2 as id, 'red' as color, '2020-01-01'::date as updated_at + + {% elif var('step') == 3 %} + + -- bump timestamp for both records. Expect that after this runs + -- using the timestamp strategy, both ids should have the color + -- 'pink' in the database. This should be in the future b/c we're + -- going to compare to the check timestamp, which will be _now_ + select 1 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at + union all + select 2 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at + + {% endif %} + +{% endsnapshot %} +""" + + +@pytest.fixture(scope="class") +def models(): + return {"gen.sql": models_slow__gen_sql} + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_changing_strategy__snapshot_sql} + + +@pytest.fixture(scope="class") +def tests(): + return {"test_snapshot.sql": test_snapshots_changing_strategy__test_snapshot_sql} + + +def test_changing_strategy(project): + results = run_dbt(["snapshot", "--vars", "{strategy: check, step: 1}"]) + assert len(results) == 1 + + results = run_dbt(["snapshot", "--vars", "{strategy: check, step: 2}"]) + assert len(results) == 1 + + results = run_dbt(["snapshot", "--vars", "{strategy: timestamp, step: 3}"]) + assert len(results) == 1 + + results = run_dbt(["test"]) + assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_snapshot.py new file mode 100644 index 000000000..d11442e82 --- /dev/null +++ b/tests/functional/simple_snapshot/test_check_cols_snapshot.py @@ -0,0 +1,114 @@ +from dbt.tests.util import run_dbt +import pytest + + +snapshot_sql = """ +{% snapshot check_cols_cycle %} + + {{ + config( + target_database=database, + target_schema=schema, + unique_key='id', + strategy='check', + check_cols=['color'] + ) + }} + + {% if var('version') == 1 %} + + select 1 as id, 'red' as color union all + select 2 as id, 'green' as color + + {% elif var('version') == 2 %} + + select 1 as id, 'blue' as color union all + select 2 as id, 'green' as color + + {% elif var('version') == 3 %} + + select 1 as id, 'red' as color union all + select 2 as id, 'pink' as color + + {% else %} + {% do exceptions.raise_compiler_error("Got bad version: " ~ var('version')) %} + {% endif %} + +{% endsnapshot %} +""" + +snapshot_test_sql = """ +with query as ( + + -- check that the current value for id=1 is red + select case when ( + select count(*) + from {{ ref('check_cols_cycle') }} + where id = 1 and color = 'red' and dbt_valid_to is null + ) = 1 then 0 else 1 end as failures + + union all + + -- check that the previous 'red' value for id=1 is invalidated + select case when ( + select count(*) + from {{ ref('check_cols_cycle') }} + where id = 1 and color = 'red' and dbt_valid_to is not null + ) = 1 then 0 else 1 end as failures + + union all + + -- check that there's only one current record for id=2 + select case when ( + select count(*) + from {{ ref('check_cols_cycle') }} + where id = 2 and color = 'pink' and dbt_valid_to is null + ) = 1 then 0 else 1 end as failures + + union all + + -- check that the previous value for id=2 is represented + select case when ( + select count(*) + from {{ ref('check_cols_cycle') }} + where id = 2 and color = 'green' and dbt_valid_to is not null + ) = 1 then 0 else 1 end as failures + + union all + + -- check that there are 5 records total in the table + select case when ( + select count(*) + from {{ ref('check_cols_cycle') }} + ) = 5 then 0 else 1 end as failures + +) + +select * +from query +where failures = 1 +""" + + +@pytest.fixture(scope="class") +def snapshots(): + return {"my_snapshot.sql": snapshot_sql} + + +@pytest.fixture(scope="class") +def tests(): + return {"my_test.sql": snapshot_test_sql} + + +def test_simple_snapshot(project): + + results = run_dbt(["snapshot", "--vars", "version: 1"]) + assert len(results) == 1 + + results = run_dbt(["snapshot", "--vars", "version: 2"]) + assert len(results) == 1 + + results = run_dbt(["snapshot", "--vars", "version: 3"]) + assert len(results) == 1 + + run_dbt(["test", "--select", "test_type:singular", "--vars", "version: 3"]) diff --git a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py new file mode 100644 index 000000000..0c99d85e4 --- /dev/null +++ b/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py @@ -0,0 +1,114 @@ +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + + +snapshot_sql = """ +{% snapshot snapshot_check_cols_updated_at_actual %} + {{ + config( + target_database=database, + target_schema=schema, + unique_key='id', + strategy='check', + check_cols='all', + updated_at="'" ~ var("updated_at") ~ "'::timestamp", + ) + }} + + {% if var('version') == 1 %} + + select 'a' as id, 10 as counter, '2016-01-01T00:00:00Z'::timestamp as timestamp_col union all + select 'b' as id, 20 as counter, '2016-01-01T00:00:00Z'::timestamp as timestamp_col + + {% elif var('version') == 2 %} + + select 'a' as id, 30 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col union all + select 'b' as id, 20 as counter, '2016-01-01T00:00:00Z'::timestamp as timestamp_col union all + select 'c' as id, 40 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col + + {% else %} + + select 'a' as id, 30 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col union all + select 'c' as id, 40 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col + + {% endif %} + +{% endsnapshot %} +""" + +expected_csv = """ +id,counter,timestamp_col,dbt_scd_id,dbt_updated_at,dbt_valid_from,dbt_valid_to +a,10,2016-01-01 00:00:00.000,927354aa091feffd9437ead0bdae7ae1,2016-07-01 00:00:00.000,2016-07-01 00:00:00.000,2016-07-02 00:00:00.000 +b,20,2016-01-01 00:00:00.000,40ace4cbf8629f1720ec8a529ed76f8c,2016-07-01 00:00:00.000,2016-07-01 00:00:00.000, +a,30,2016-01-02 00:00:00.000,e9133f2b302c50e36f43e770944cec9b,2016-07-02 00:00:00.000,2016-07-02 00:00:00.000, +c,40,2016-01-02 00:00:00.000,09d33d35101e788c152f65d0530b6837,2016-07-02 00:00:00.000,2016-07-02 00:00:00.000, +""".lstrip() + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot_check_cols_updated_at_actual.sql": snapshot_sql} + + +@pytest.fixture(scope="class") +def seeds(): + return {"snapshot_check_cols_updated_at_expected.csv": expected_csv} + + +@pytest.fixture(scope="class") +def project_config_update(): + return { + "seeds": { + "quote_columns": False, + "test": { + "snapshot_check_cols_updated_at_expected": { + "+column_types": { + "timestamp_col": "timestamp without time zone", + "dbt_updated_at": "timestamp without time zone", + "dbt_valid_from": "timestamp without time zone", + "dbt_valid_to": "timestamp without time zone", + }, + }, + }, + }, + } + + +def test_simple_snapshot(project): + """ + Test that the `dbt_updated_at` column reflects the `updated_at` timestamp expression in the config. + + Approach: + 1. Create a table that represents the expected data after a series of snapshots + - Use dbt seed to create the expected relation (`snapshot_check_cols_updated_at_expected`) + 2. Execute a series of snapshots to create the data + - Use a series of (3) dbt snapshot commands to create the actual relation (`snapshot_check_cols_updated_at_actual`) + - The logic can switch between 3 different versions of the data (depending on the `version` number) + - The `updated_at` value is passed in via `--vars` and cast to a timestamp in the snapshot config + 3. Compare the two relations for equality + """ + + # 1. Create a table that represents the expected data after a series of snapshots + results = run_dbt(["seed", "--show", "--vars", "{version: 1, updated_at: 2016-07-01}"]) + assert len(results) == 1 + + # 2. Execute a series of snapshots to create the data + + # Snapshot day 1 + results = run_dbt(["snapshot", "--vars", "{version: 1, updated_at: 2016-07-01}"]) + assert len(results) == 1 + + # Snapshot day 2 + results = run_dbt(["snapshot", "--vars", "{version: 2, updated_at: 2016-07-02}"]) + assert len(results) == 1 + + # Snapshot day 3 + results = run_dbt(["snapshot", "--vars", "{version: 3, updated_at: 2016-07-03}"]) + assert len(results) == 1 + + # 3. Compare the two relations for equality + check_relations_equal( + project.adapter, + ["snapshot_check_cols_updated_at_actual", "snapshot_check_cols_updated_at_expected"], + compare_snapshot_cols=True, + ) diff --git a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py b/tests/functional/simple_snapshot/test_comment_ending_snapshot.py new file mode 100644 index 000000000..ab21b641b --- /dev/null +++ b/tests/functional/simple_snapshot/test_comment_ending_snapshot.py @@ -0,0 +1,36 @@ +import os + +from dbt.tests.util import run_dbt +import pytest + + +snapshots_with_comment_at_end__snapshot_sql = """ +{% snapshot snapshot_actual %} + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id', + strategy='check', + check_cols=['email'], + ) + }} + select * from {{target.database}}.{{schema}}.seed + -- Test comment to prevent recurrence of https://github.com/dbt-labs/dbt-core/issues/6781 +{% endsnapshot %} +""" + + +class TestSnapshotsWithCommentAtEnd: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_with_comment_at_end__snapshot_sql} + + def test_comment_ending(self, project): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + # N.B. Snapshot is run twice to ensure snapshot_check_all_get_existing_columns is fully run + # (it exits early if the table doesn't already exist) + run_dbt(["snapshot"]) + results = run_dbt(["snapshot"]) + assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py b/tests/functional/simple_snapshot/test_cross_schema_snapshot.py new file mode 100644 index 000000000..1072a5aa8 --- /dev/null +++ b/tests/functional/simple_snapshot/test_cross_schema_snapshot.py @@ -0,0 +1,48 @@ +import os + +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, + snapshots_pg__snapshot_sql, +) + + +NUM_SNAPSHOT_MODELS = 1 + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_pg__snapshot_sql} + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + +@pytest.fixture(scope="class") +def macros(): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + +def test_cross_schema_snapshot(project): + # populate seed and snapshot tables + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + + target_schema = "{}_snapshotted".format(project.test_schema) + + # create a snapshot using the new schema + results = run_dbt(["snapshot", "--vars", '{{"target_schema": "{}"}}'.format(target_schema)]) + assert len(results) == NUM_SNAPSHOT_MODELS + + # run dbt from test_schema with a ref to to new target_schema + results = run_dbt(["run", "--vars", '{{"target_schema": {}}}'.format(target_schema)]) + assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py new file mode 100644 index 000000000..ab25bbfab --- /dev/null +++ b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py @@ -0,0 +1,192 @@ +from datetime import datetime, timedelta +import os + +from dbt.tests.adapter.utils.test_current_timestamp import is_aware +from dbt.tests.util import run_dbt, check_relations_equal +import pytest +import pytz + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, + snapshots_pg__snapshot_sql, +) + + +# These tests uses the same seed data, containing 20 records of which we hard delete the last 10. +# These deleted records set the dbt_valid_to to time the snapshot was ran. + + +def convert_to_aware(d: datetime) -> datetime: + # There are two types of datetime objects in Python: naive and aware + # Assume any dbt snapshot timestamp that is naive is meant to represent UTC + if d is None: + return d + elif is_aware(d): + return d + else: + return d.replace(tzinfo=pytz.UTC) + + +def is_close_datetime( + dt1: datetime, dt2: datetime, atol: timedelta = timedelta(microseconds=1) +) -> bool: + # Similar to pytest.approx, math.isclose, and numpy.isclose + # Use an absolute tolerance to compare datetimes that may not be perfectly equal. + # Two None values will compare as equal. + if dt1 is None and dt2 is None: + return True + elif dt1 is not None and dt2 is not None: + return (dt1 > (dt2 - atol)) and (dt1 < (dt2 + atol)) + else: + return False + + +def datetime_snapshot(): + NUM_SNAPSHOT_MODELS = 1 + begin_snapshot_datetime = datetime.now(pytz.UTC) + results = run_dbt(["snapshot", "--vars", "{invalidate_hard_deletes: true}"]) + assert len(results) == NUM_SNAPSHOT_MODELS + + return begin_snapshot_datetime + + +@pytest.fixture(scope="class", autouse=True) +def setUp(project): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_pg__snapshot_sql} + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + +@pytest.fixture(scope="class") +def macros(): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + +def test_snapshot_hard_delete(project): + # run the first snapshot + datetime_snapshot() + + check_relations_equal(project.adapter, ["snapshot_expected", "snapshot_actual"]) + + invalidated_snapshot_datetime = None + revived_snapshot_datetime = None + + # hard delete last 10 records + project.run_sql( + "delete from {}.{}.seed where id >= 10;".format(project.database, project.test_schema) + ) + + # snapshot and assert invalidated + invalidated_snapshot_datetime = datetime_snapshot() + + snapshotted = project.run_sql( + """ + select + id, + dbt_valid_to + from {}.{}.snapshot_actual + order by id + """.format( + project.database, project.test_schema + ), + fetch="all", + ) + + assert len(snapshotted) == 20 + for result in snapshotted[10:]: + # result is a tuple, the dbt_valid_to column is the latest + assert isinstance(result[-1], datetime) + dbt_valid_to = convert_to_aware(result[-1]) + + # Plenty of wiggle room if clocks aren't perfectly sync'd, etc + assert is_close_datetime( + dbt_valid_to, invalidated_snapshot_datetime, timedelta(minutes=1) + ), f"SQL timestamp {dbt_valid_to.isoformat()} is not close enough to Python UTC {invalidated_snapshot_datetime.isoformat()}" + + # revive records + # Timestamp must have microseconds for tests below to be meaningful + # Assume `updated_at` is TIMESTAMP WITHOUT TIME ZONE that implicitly represents UTC + revival_timestamp = datetime.now(pytz.UTC).strftime("%Y-%m-%d %H:%M:%S.%f") + project.run_sql( + """ + insert into {}.{}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values + (10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '{}'), + (11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '{}') + """.format( + project.database, project.test_schema, revival_timestamp, revival_timestamp + ) + ) + + # snapshot and assert records were revived + # Note: the revived_snapshot_datetime here is later than the revival_timestamp above + revived_snapshot_datetime = datetime_snapshot() + + # records which weren't revived (id != 10, 11) + # dbt_valid_to is not null + invalidated_records = project.run_sql( + """ + select + id, + dbt_valid_to + from {}.{}.snapshot_actual + where dbt_valid_to is not null + order by id + """.format( + project.database, project.test_schema + ), + fetch="all", + ) + + assert len(invalidated_records) == 11 + for result in invalidated_records: + # result is a tuple, the dbt_valid_to column is the latest + assert isinstance(result[1], datetime) + dbt_valid_to = convert_to_aware(result[1]) + + # Plenty of wiggle room if clocks aren't perfectly sync'd, etc + assert is_close_datetime( + dbt_valid_to, invalidated_snapshot_datetime, timedelta(minutes=1) + ), f"SQL timestamp {dbt_valid_to.isoformat()} is not close enough to Python UTC {invalidated_snapshot_datetime.isoformat()}" + + # records which were revived (id = 10, 11) + # dbt_valid_to is null + revived_records = project.run_sql( + """ + select + id, + dbt_valid_from, + dbt_valid_to + from {}.{}.snapshot_actual + where dbt_valid_to is null + and id IN (10, 11) + """.format( + project.database, project.test_schema + ), + fetch="all", + ) + + assert len(revived_records) == 2 + for result in revived_records: + # result is a tuple, the dbt_valid_from is second and dbt_valid_to is latest + # dbt_valid_from is the same as the 'updated_at' added in the revived_rows + # dbt_valid_to is null + assert isinstance(result[1], datetime) + dbt_valid_from = convert_to_aware(result[1]) + dbt_valid_to = result[2] + + assert dbt_valid_from <= revived_snapshot_datetime + assert dbt_valid_to is None diff --git a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py b/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py new file mode 100644 index 000000000..1ee8fa40d --- /dev/null +++ b/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py @@ -0,0 +1,67 @@ +import os + +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + macros_custom_snapshot__custom_sql, + models__ref_snapshot_sql, + models__schema_yml, + seeds__seed_csv, + seeds__seed_newcol_csv, +) + + +NUM_SNAPSHOT_MODELS = 1 + + +snapshots_pg_custom_invalid__snapshot_sql = """ +{% snapshot snapshot_actual %} + {# this custom strategy does not exist in the 'dbt' package #} + {{ + config( + target_database=var('target_database', database), + target_schema=var('target_schema', schema), + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='dbt.custom', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshots.sql": snapshots_pg_custom_invalid__snapshot_sql} + + +@pytest.fixture(scope="class") +def macros(): + return { + "test_no_overlaps.sql": macros__test_no_overlaps_sql, + "custom.sql": macros_custom_snapshot__custom_sql, + } + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + +@pytest.fixture(scope="class") +def seeds(): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + +def test_custom_snapshot_invalid_namespace(project): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot"], expect_pass=False) + assert len(results) == NUM_SNAPSHOT_MODELS diff --git a/tests/functional/simple_snapshot/test_long_text_snapshot.py b/tests/functional/simple_snapshot/test_long_text_snapshot.py new file mode 100644 index 000000000..0793a3fcc --- /dev/null +++ b/tests/functional/simple_snapshot/test_long_text_snapshot.py @@ -0,0 +1,70 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, +) + + +seed_longtext_sql = """ +create table {database}.{schema}.super_long ( + id INTEGER, + longstring TEXT, + updated_at TIMESTAMP WITHOUT TIME ZONE +); + +insert into {database}.{schema}.super_long (id, longstring, updated_at) VALUES +(1, 'short', current_timestamp), +(2, repeat('a', 500), current_timestamp); +""" + +snapshots_longtext__snapshot_sql = """ +{% snapshot snapshot_actual %} + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{schema}}.super_long +{% endsnapshot %} +""" + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_longtext__snapshot_sql} + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + +@pytest.fixture(scope="class") +def macros(): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + +def test_long_text(project): + project.run_sql(seed_longtext_sql) + + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + with project.adapter.connection_named("test"): + status, results = project.adapter.execute( + "select * from {}.{}.snapshot_actual".format(project.database, project.test_schema), + fetch=True, + ) + assert len(results) == 2 + got_names = set(r.get("longstring") for r in results) + assert got_names == {"a" * 500, "short"} diff --git a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py new file mode 100644 index 000000000..3a28bc778 --- /dev/null +++ b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py @@ -0,0 +1,51 @@ +from dbt.exceptions import ParsingError +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, +) + + +snapshots_invalid__snapshot_sql = """ +{# make sure to never name this anything with `target_schema` in the name, or the test will be invalid! #} +{% snapshot missing_field_target_underscore_schema %} + {# missing the mandatory target_schema parameter #} + {{ + config( + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{target.database}}.{{schema}}.seed + +{% endsnapshot %} +""" + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_invalid__snapshot_sql} + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + +@pytest.fixture(scope="class") +def macros(): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + +def test_missing_strategy(project): + with pytest.raises(ParsingError) as exc: + run_dbt(["compile"], expect_pass=False) + + assert "Snapshots must be configured with a 'strategy'" in str(exc.value) diff --git a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py b/tests/functional/simple_snapshot/test_renamed_source_snapshot.py new file mode 100644 index 000000000..23db614bb --- /dev/null +++ b/tests/functional/simple_snapshot/test_renamed_source_snapshot.py @@ -0,0 +1,74 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + macros_custom_snapshot__custom_sql, + seeds__seed_csv, + seeds__seed_newcol_csv, +) + + +snapshots_checkall__snapshot_sql = """ +{% snapshot my_snapshot %} + {{ config(check_cols='all', unique_key='id', strategy='check', target_database=database, target_schema=schema) }} + select * from {{ ref(var('seed_name', 'seed')) }} +{% endsnapshot %} +""" + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_checkall__snapshot_sql} + + +@pytest.fixture(scope="class") +def macros(): + return { + "test_no_overlaps.sql": macros__test_no_overlaps_sql, + "custom.sql": macros_custom_snapshot__custom_sql, + } + + +@pytest.fixture(scope="class") +def seeds(): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + +def test_renamed_source(project): + run_dbt(["seed"]) + run_dbt(["snapshot"]) + database = project.database + results = project.run_sql( + "select * from {}.{}.my_snapshot".format(database, project.test_schema), + fetch="all", + ) + assert len(results) == 3 + for result in results: + assert len(result) == 6 + + # over ride the ref var in the snapshot definition to use a seed with an additional column, last_name + run_dbt(["snapshot", "--vars", "{seed_name: seed_newcol}"]) + results = project.run_sql( + "select * from {}.{}.my_snapshot where last_name is not NULL".format( + database, project.test_schema + ), + fetch="all", + ) + assert len(results) == 3 + + for result in results: + # new column + assert len(result) == 7 + assert result[-1] is not None + + results = project.run_sql( + "select * from {}.{}.my_snapshot where last_name is NULL".format( + database, project.test_schema + ), + fetch="all", + ) + assert len(results) == 3 + for result in results: + # new column + assert len(result) == 7 diff --git a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py b/tests/functional/simple_snapshot/test_select_exclude_snapshot.py new file mode 100644 index 000000000..ac2b4bc92 --- /dev/null +++ b/tests/functional/simple_snapshot/test_select_exclude_snapshot.py @@ -0,0 +1,161 @@ +import os + +from dbt.tests.util import ( + check_relations_equal, + check_table_does_not_exist, + run_dbt, +) +import pytest + +from tests.functional.simple_snapshot.fixtures import ( + macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, + seeds__seed_csv, + seeds__seed_newcol_csv, + snapshots_pg__snapshot_sql, + snapshots_select__snapshot_sql, + snapshots_select_noconfig__snapshot_sql, +) + + +def all_snapshots(project): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + + results = run_dbt(["snapshot"]) + assert len(results) == 4 + + check_relations_equal(project.adapter, ["snapshot_castillo", "snapshot_castillo_expected"]) + check_relations_equal(project.adapter, ["snapshot_alvarez", "snapshot_alvarez_expected"]) + check_relations_equal(project.adapter, ["snapshot_kelly", "snapshot_kelly_expected"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + path = os.path.join(project.test_data_dir, "invalidate_postgres.sql") + project.run_sql_file(path) + + path = os.path.join(project.test_data_dir, "update.sql") + project.run_sql_file(path) + + results = run_dbt(["snapshot"]) + assert len(results) == 4 + check_relations_equal(project.adapter, ["snapshot_castillo", "snapshot_castillo_expected"]) + check_relations_equal(project.adapter, ["snapshot_alvarez", "snapshot_alvarez_expected"]) + check_relations_equal(project.adapter, ["snapshot_kelly", "snapshot_kelly_expected"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + +def exclude_snapshots(project): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot", "--exclude", "snapshot_castillo"]) + assert len(results) == 3 + + check_table_does_not_exist(project.adapter, "snapshot_castillo") + check_relations_equal(project.adapter, ["snapshot_alvarez", "snapshot_alvarez_expected"]) + check_relations_equal(project.adapter, ["snapshot_kelly", "snapshot_kelly_expected"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + +def select_snapshots(project): + path = os.path.join(project.test_data_dir, "seed_pg.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot", "--select", "snapshot_castillo"]) + assert len(results) == 1 + + check_relations_equal(project.adapter, ["snapshot_castillo", "snapshot_castillo_expected"]) + check_table_does_not_exist(project.adapter, "snapshot_alvarez") + check_table_does_not_exist(project.adapter, "snapshot_kelly") + check_table_does_not_exist(project.adapter, "snapshot_actual") + + +# all of the tests below use one of both of the above tests with +# various combinations of snapshots and macros +class SelectBasicSetup: + @pytest.fixture(scope="class") + def snapshots(self): + return { + "snapshot.sql": snapshots_pg__snapshot_sql, + "snapshot_select.sql": snapshots_select__snapshot_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + +class TestAllBasic(SelectBasicSetup): + def test_all_snapshots(self, project): + all_snapshots(project) + + +class TestExcludeBasic(SelectBasicSetup): + def test_exclude_snapshots(self, project): + exclude_snapshots(project) + + +class TestSelectBasic(SelectBasicSetup): + def test_select_snapshots(self, project): + select_snapshots(project) + + +class SelectConfiguredSetup: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshots_select_noconfig__snapshot_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models__schema_yml, + "ref_snapshot.sql": models__ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} + + # TODO: don't have access to project here so this breaks + @pytest.fixture(scope="class") + def project_config_update(self): + snapshot_config = { + "snapshots": { + "test": { + "target_schema": "{{ target.schema }}", + "unique_key": "id || '-' || first_name", + "strategy": "timestamp", + "updated_at": "updated_at", + } + } + } + return snapshot_config + + +class TestConfigured(SelectConfiguredSetup): + def test_all_configured_snapshots(self, project): + all_snapshots(project) + + +class TestConfiguredExclude(SelectConfiguredSetup): + def test_exclude_configured_snapshots(self, project): + exclude_snapshots(project) + + +class TestConfiguredSelect(SelectConfiguredSetup): + def test_select_configured_snapshots(self, project): + select_snapshots(project) diff --git a/tests/functional/simple_snapshot/test_slow_query_snapshot.py b/tests/functional/simple_snapshot/test_slow_query_snapshot.py new file mode 100644 index 000000000..a65b6cb3a --- /dev/null +++ b/tests/functional/simple_snapshot/test_slow_query_snapshot.py @@ -0,0 +1,82 @@ +from dbt.tests.util import run_dbt +import pytest + +from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql + + +snapshots_slow__snapshot_sql = """ + +{% snapshot my_slow_snapshot %} + + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id', + strategy='timestamp', + updated_at='updated_at' + ) + }} + + select + id, + updated_at, + seconds + + from {{ ref('gen') }} + +{% endsnapshot %} +""" + + +test_snapshots_slow__test_timestamps_sql = """ + +/* + Assert that the dbt_valid_from of the latest record + is equal to the dbt_valid_to of the previous record +*/ + +with snapshot as ( + + select * from {{ ref('my_slow_snapshot') }} + +) + +select + snap1.id, + snap1.dbt_valid_from as new_valid_from, + snap2.dbt_valid_from as old_valid_from, + snap2.dbt_valid_to as old_valid_to + +from snapshot as snap1 +join snapshot as snap2 on snap1.id = snap2.id +where snap1.dbt_valid_to is null + and snap2.dbt_valid_to is not null + and snap1.dbt_valid_from != snap2.dbt_valid_to +""" + + +@pytest.fixture(scope="class") +def models(): + return {"gen.sql": models_slow__gen_sql} + + +@pytest.fixture(scope="class") +def snapshots(): + return {"snapshot.sql": snapshots_slow__snapshot_sql} + + +@pytest.fixture(scope="class") +def tests(): + return {"test_timestamps.sql": test_snapshots_slow__test_timestamps_sql} + + +def test_slow(project): + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + results = run_dbt(["test"]) + assert len(results) == 1 diff --git a/tests/functional/source_overrides/fixtures.py b/tests/functional/source_overrides/fixtures.py new file mode 100644 index 000000000..f7f49235d --- /dev/null +++ b/tests/functional/source_overrides/fixtures.py @@ -0,0 +1,387 @@ +import pytest + + +dupe_models__schema2_yml = """ +version: 2 +sources: + - name: my_source + overrides: localdep + schema: "{{ target.schema }}" + database: "{{ target.database }}" + freshness: + error_after: {count: 3, period: day} + tables: + - name: my_table + freshness: null + identifier: my_real_seed + # on the override, the "color" column is only unique, it can be null! + columns: + - name: id + data_tests: + - not_null + - unique + - name: color + data_tests: + - unique + - name: my_other_table + freshness: null + identifier: my_real_other_seed + - name: snapshot_freshness + identifier: snapshot_freshness_base + + freshness: + error_after: {count: 1, period: day} + +""" + +dupe_models__schema1_yml = """ +version: 2 +sources: + - name: my_source + overrides: localdep + schema: "{{ target.schema }}" + database: "{{ target.database }}" + freshness: + error_after: {count: 3, period: day} + tables: + - name: my_table + freshness: null + identifier: my_real_seed + # on the override, the "color" column is only unique, it can be null! + columns: + - name: id + data_tests: + - not_null + - unique + - name: color + data_tests: + - unique + - name: my_other_table + freshness: null + identifier: my_real_other_seed + - name: snapshot_freshness + identifier: snapshot_freshness_base + loaded_at_field: updated_at + freshness: + error_after: {count: 1, period: day} + +""" + +local_dependency__dbt_project_yml = """ +config-version: 2 +name: localdep + +version: '1.0' + +profile: 'default' + +seeds: + quote_columns: False + +seed-paths: ['seeds'] + +""" + +local_dependency__models__schema_yml = """ +version: 2 +sources: + - name: my_source + schema: invalid_schema + database: invalid_database + freshness: + error_after: {count: 3, period: hour} + tables: + - name: my_table + freshness: null + identifier: my_seed + columns: + - name: id + data_tests: + - unique + - not_null + - name: color + data_tests: + - unique + - not_null + - name: my_other_table + identifier: my_other_seed + columns: + - name: id + data_tests: + - unique + - not_null + - name: letter + data_tests: + - unique + - not_null + - name: snapshot_freshness + identifier: snapshot_freshness_base + loaded_at_field: updated_at + freshness: + error_after: {count: 1, period: hour} + - name: my_other_source + schema: "{{ target.schema }}" + database: "{{ target.database }}" + freshness: + error_after: {count: 1, period: day} + tables: + - name: never_fresh + loaded_at_field: updated_at + +""" + +local_dependency__models__my_model_sql = """ + +{{ config(materialized="table") }} + +with colors as ( + select id, color from {{ source('my_source', 'my_table') }} +), +letters as ( + select id, letter from {{ source('my_source', 'my_other_table') }} +) +select letter, color from colors join letters using (id) + +""" + +local_dependency__seeds__my_other_seed_csv = """id,letter +1,r +2,g +3,b +""" + +local_dependency__seeds__my_seed_csv = """id,color +1,red +2,green +3,blue +""" + +local_dependency__seeds__keep__never_fresh_csv = """favorite_color,id,first_name,email,ip_address,updated_at +blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 +blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 +blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 +blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 +blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 +blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 +blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 +blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 +blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 +blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 +blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 +blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 +blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 +blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 +blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 +blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 +blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 +blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 +blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 +blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 +blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 +blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 +blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 +blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 +blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 +blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 +blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 +blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 +blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 +blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 +blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 +blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 +blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 +blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 +blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 +blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 +blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 +blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 +blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 +blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 +blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 +blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 +blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 +blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 +blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 +blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 +blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 +blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 +blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 +blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 +""" + +local_dependency__seeds__keep__snapshot_freshness_base_csv = """favorite_color,id,first_name,email,ip_address,updated_at +blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 +blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 +blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 +blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 +blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 +blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 +blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 +blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 +blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 +blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 +blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 +blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 +blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 +blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 +blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 +blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 +blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 +blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 +blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 +blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 +blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 +blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 +blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 +blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 +blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 +blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 +blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 +blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 +blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 +blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 +blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 +blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 +blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 +blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 +blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 +blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 +blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 +blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 +blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 +blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 +blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 +blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 +blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 +blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 +blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 +blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 +blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 +blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 +blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 +blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 +green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12 +green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23 +green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51 +green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19 +green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40 +green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32 +green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09 +green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40 +green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27 +green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06 +green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44 +green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47 +green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37 +green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24 +green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56 +green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35 +green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45 +green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23 +green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21 +green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46 +green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20 +green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54 +green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55 +green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24 +green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30 +green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01 +green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42 +green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33 +green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52 +green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24 +green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56 +green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59 +green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36 +green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27 +green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42 +green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39 +green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49 +green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44 +green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39 +green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04 +green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19 +green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14 +green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49 +green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54 +green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19 +green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10 +green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45 +green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58 +green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55 +green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39 +""" + +models__schema_yml = """ +version: 2 +sources: + - name: my_source + overrides: localdep + schema: "{{ target.schema }}" + database: "{{ target.database }}" + freshness: + error_after: {count: 3, period: day} + tables: + - name: my_table + freshness: null + identifier: my_real_seed + # on the override, the "color" column is only unique, it can be null! + columns: + - name: id + data_tests: + - not_null + - unique + - name: color + data_tests: + - unique + - name: my_other_table + freshness: null + identifier: my_real_other_seed + - name: snapshot_freshness + identifier: snapshot_freshness_base + loaded_at_field: updated_at + freshness: + error_after: {count: 1, period: day} + +""" + +seeds__expected_result_csv = """letter,color +c,cyan +m,magenta +y,yellow +k,key +""" + +seeds__my_real_other_seed_csv = """id,letter +1,c +2,m +3,y +4,k +""" + +seeds__my_real_seed_csv = """id,color +1,cyan +2,magenta +3,yellow +4,key +5,NULL +""" + + +@pytest.fixture(scope="class") +def local_dependency(): + return { + "dbt_project.yml": local_dependency__dbt_project_yml, + "models": { + "schema.yml": local_dependency__models__schema_yml, + "my_model.sql": local_dependency__models__my_model_sql, + }, + "seeds": { + "my_other_seed.csv": local_dependency__seeds__my_other_seed_csv, + "my_seed.csv": local_dependency__seeds__my_seed_csv, + "keep": { + "never_fresh.csv": local_dependency__seeds__keep__never_fresh_csv, + "snapshot_freshness_base.csv": local_dependency__seeds__keep__snapshot_freshness_base_csv, + }, + }, + } diff --git a/tests/functional/source_overrides/test_simple_source_override.py b/tests/functional/source_overrides/test_simple_source_override.py new file mode 100644 index 000000000..d1cd3352e --- /dev/null +++ b/tests/functional/source_overrides/test_simple_source_override.py @@ -0,0 +1,146 @@ +from datetime import datetime, timedelta + +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_relations_equal, run_dbt, update_config_file +import pytest + +from tests.functional.source_overrides.fixtures import ( + local_dependency, + models__schema_yml, + seeds__expected_result_csv, + seeds__my_real_other_seed_csv, + seeds__my_real_seed_csv, +) + + +class TestSourceOverride: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, local_dependency): # noqa: F811 + write_project_files(project_root, "local_dependency", local_dependency) + + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": models__schema_yml} + + @pytest.fixture(scope="class") + def seeds(self): + return { + "expected_result.csv": seeds__expected_result_csv, + "my_real_other_seed.csv": seeds__my_real_other_seed_csv, + "my_real_seed.csv": seeds__my_real_seed_csv, + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "local": "local_dependency", + }, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "localdep": { + "enabled": False, + "keep": { + "enabled": True, + }, + }, + "quote_columns": False, + }, + "sources": { + "localdep": { + "my_other_source": { + "enabled": False, + } + } + }, + } + + def _set_updated_at_to(self, insert_id, delta, project): + insert_time = datetime.utcnow() + delta + timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S") + # favorite_color,id,first_name,email,ip_address,updated_at + + quoted_columns = ",".join( + project.adapter.quote(c) + for c in ("favorite_color", "id", "first_name", "email", "ip_address", "updated_at") + ) + + kwargs = { + "schema": project.test_schema, + "time": timestr, + "id": insert_id, + "source": project.adapter.quote("snapshot_freshness_base"), + "quoted_columns": quoted_columns, + } + + raw_code = """INSERT INTO {schema}.{source} + ({quoted_columns}) + VALUES ( + 'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}' + )""".format( + **kwargs + ) + + project.run_sql(raw_code) + + return insert_id + 1 + + def test_source_overrides(self, project): + insert_id = 101 + + run_dbt(["deps"]) + + seed_results = run_dbt(["seed"]) + assert len(seed_results) == 5 + + # There should be 7, as we disabled 1 test of the original 8 + test_results = run_dbt(["test"]) + assert len(test_results) == 7 + + results = run_dbt(["run"]) + assert len(results) == 1 + + check_relations_equal(project.adapter, ["expected_result", "my_model"]) + + # set the updated_at field of this seed to last week + insert_id = self._set_updated_at_to(insert_id, timedelta(days=-7), project) + # if snapshot-freshness fails, freshness just didn't happen! + results = run_dbt(["source", "snapshot-freshness"], expect_pass=False) + # we disabled my_other_source, so we only run the one freshness check + # in + assert len(results) == 1 + # If snapshot-freshness passes, that means error_after was + # applied from the source override but not the source table override + insert_id = self._set_updated_at_to(insert_id, timedelta(days=-2), project) + results = run_dbt( + ["source", "snapshot-freshness"], + expect_pass=False, + ) + assert len(results) == 1 + + insert_id = self._set_updated_at_to(insert_id, timedelta(hours=-12), project) + results = run_dbt(["source", "snapshot-freshness"], expect_pass=True) + assert len(results) == 1 + + # update source to be enabled + new_source_config = { + "sources": { + "localdep": { + "my_other_source": { + "enabled": True, + } + } + } + } + update_config_file(new_source_config, project.project_root, "dbt_project.yml") + + # enable my_other_source, snapshot freshness should fail due to the new + # not-fresh source + results = run_dbt(["source", "snapshot-freshness"], expect_pass=False) + assert len(results) == 2 diff --git a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py new file mode 100644 index 000000000..0a9ab0d8d --- /dev/null +++ b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py @@ -0,0 +1,68 @@ +import os + +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt +from dbt_common.exceptions import CompilationError +import pytest + +from tests.functional.source_overrides.fixtures import ( + dupe_models__schema1_yml, + dupe_models__schema2_yml, + local_dependency, +) + + +class TestSourceOverrideDuplicates: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, local_dependency): # noqa: F811 + write_project_files(project_root, "local_dependency", local_dependency) + + @pytest.fixture(scope="class") + def models(self): + return { + "schema2.yml": dupe_models__schema2_yml, + "schema1.yml": dupe_models__schema1_yml, + } + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "local": "local_dependency", + }, + ] + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "localdep": { + "enabled": False, + "keep": { + "enabled": True, + }, + }, + "quote_columns": False, + }, + "sources": { + "localdep": { + "my_other_source": { + "enabled": False, + } + } + }, + } + + def test_source_duplicate_overrides(self, project): + run_dbt(["deps"]) + with pytest.raises(CompilationError) as exc: + run_dbt(["compile"]) + + assert "dbt found two schema.yml entries for the same source named" in str(exc.value) + assert "one of these files" in str(exc.value) + schema1_path = os.path.join("models", "schema1.yml") + schema2_path = os.path.join("models", "schema2.yml") + assert schema1_path in str(exc.value) + assert schema2_path in str(exc.value) diff --git a/tests/functional/sources/common_source_setup.py b/tests/functional/sources/common_source_setup.py new file mode 100644 index 000000000..d4c7890f2 --- /dev/null +++ b/tests/functional/sources/common_source_setup.py @@ -0,0 +1,67 @@ +import os + +from dbt.tests.util import run_dbt +import pytest +import yaml + +from tests.functional.sources.fixtures import ( + models_descendant_model_sql, + models_ephemeral_model_sql, + models_multi_source_model_sql, + models_nonsource_descendant_sql, + models_schema_yml, + models_view_model_sql, + seeds_expected_multi_source_csv, + seeds_other_source_table_csv, + seeds_other_table_csv, + seeds_source_csv, +) + + +class BaseSourcesTest: + @pytest.fixture(scope="class", autouse=True) + def setEnvVars(self): + os.environ["DBT_TEST_SCHEMA_NAME_VARIABLE"] = "test_run_schema" + + yield + + del os.environ["DBT_TEST_SCHEMA_NAME_VARIABLE"] + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_schema_yml, + "view_model.sql": models_view_model_sql, + "ephemeral_model.sql": models_ephemeral_model_sql, + "descendant_model.sql": models_descendant_model_sql, + "multi_source_model.sql": models_multi_source_model_sql, + "nonsource_descendant.sql": models_nonsource_descendant_sql, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "source.csv": seeds_source_csv, + "other_table.csv": seeds_other_table_csv, + "expected_multi_source.csv": seeds_expected_multi_source_csv, + "other_source_table.csv": seeds_other_source_table_csv, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "seed-paths": ["seeds"], + "quoting": {"database": True, "schema": True, "identifier": True}, + "seeds": { + "quote_columns": True, + }, + } + + def run_dbt_with_vars(self, project, cmd, *args, **kwargs): + vars_dict = { + "test_run_schema": project.test_schema, + "test_loaded_at": project.adapter.quote("updated_at"), + } + cmd.extend(["--vars", yaml.safe_dump(vars_dict)]) + return run_dbt(cmd, *args, **kwargs) diff --git a/tests/functional/sources/data/seed.sql b/tests/functional/sources/data/seed.sql new file mode 100644 index 000000000..40110b990 --- /dev/null +++ b/tests/functional/sources/data/seed.sql @@ -0,0 +1,113 @@ +create table {schema}.seed_expected ( + favorite_color TEXT, + id INTEGER, + first_name TEXT, + email TEXT, + ip_address TEXT, + updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +INSERT INTO {schema}.seed_expected + ("favorite_color","id","first_name","email","ip_address","updated_at") +VALUES + ('blue',1,'Larry','lking0@miitbeian.gov.cn','''69.135.206.194''','2008-09-12 19:08:31'), + ('blue',2,'Larry','lperkins1@toplist.cz','''64.210.133.162''','1978-05-09 04:15:14'), + ('blue',3,'Anna','amontgomery2@miitbeian.gov.cn','''168.104.64.114''','2011-10-16 04:07:57'), + ('blue',4,'Sandra','sgeorge3@livejournal.com','''229.235.252.98''','1973-07-19 10:52:43'), + ('blue',5,'Fred','fwoods4@google.cn','''78.229.170.124''','2012-09-30 16:38:29'), + ('blue',6,'Stephen','shanson5@livejournal.com','''182.227.157.105''','1995-11-07 21:40:50'), + ('blue',7,'William','wmartinez6@upenn.edu','''135.139.249.50''','1982-09-05 03:11:59'), + ('blue',8,'Jessica','jlong7@hao123.com','''203.62.178.210''','1991-10-16 11:03:15'), + ('blue',9,'Douglas','dwhite8@tamu.edu','''178.187.247.1''','1979-10-01 09:49:48'), + ('blue',10,'Lisa','lcoleman9@nydailynews.com','''168.234.128.249''','2011-05-26 07:45:49'), + ('blue',11,'Ralph','rfieldsa@home.pl','''55.152.163.149''','1972-11-18 19:06:11'), + ('blue',12,'Louise','lnicholsb@samsung.com','''141.116.153.154''','2014-11-25 20:56:14'), + ('blue',13,'Clarence','cduncanc@sfgate.com','''81.171.31.133''','2011-11-17 07:02:36'), + ('blue',14,'Daniel','dfranklind@omniture.com','''8.204.211.37''','1980-09-13 00:09:04'), + ('blue',15,'Katherine','klanee@auda.org.au','''176.96.134.59''','1997-08-22 19:36:56'), + ('blue',16,'Billy','bwardf@wikia.com','''214.108.78.85''','2003-10-19 02:14:47'), + ('blue',17,'Annie','agarzag@ocn.ne.jp','''190.108.42.70''','1988-10-28 15:12:35'), + ('blue',18,'Shirley','scolemanh@fastcompany.com','''109.251.164.84''','1988-08-24 10:50:57'), + ('blue',19,'Roger','rfrazieri@scribd.com','''38.145.218.108''','1985-12-31 15:17:15'), + ('blue',20,'Lillian','lstanleyj@goodreads.com','''47.57.236.17''','1970-06-08 02:09:05'), + ('blue',21,'Aaron','arodriguezk@nps.gov','''205.245.118.221''','1985-10-11 23:07:49'), + ('blue',22,'Patrick','pparkerl@techcrunch.com','''19.8.100.182''','2006-03-29 12:53:56'), + ('blue',23,'Phillip','pmorenom@intel.com','''41.38.254.103''','2011-11-07 15:35:43'), + ('blue',24,'Henry','hgarcian@newsvine.com','''1.191.216.252''','2008-08-28 08:30:44'), + ('blue',25,'Irene','iturnero@opera.com','''50.17.60.190''','1994-04-01 07:15:02'), + ('blue',26,'Andrew','adunnp@pen.io','''123.52.253.176''','2000-11-01 06:03:25'), + ('blue',27,'David','dgutierrezq@wp.com','''238.23.203.42''','1988-01-25 07:29:18'), + ('blue',28,'Henry','hsanchezr@cyberchimps.com','''248.102.2.185''','1983-01-01 13:36:37'), + ('blue',29,'Evelyn','epetersons@gizmodo.com','''32.80.46.119''','1979-07-16 17:24:12'), + ('blue',30,'Tammy','tmitchellt@purevolume.com','''249.246.167.88''','2001-04-03 10:00:23'), + ('blue',31,'Jacqueline','jlittleu@domainmarket.com','''127.181.97.47''','1986-02-11 21:35:50'), + ('blue',32,'Earl','eortizv@opera.com','''166.47.248.240''','1996-07-06 08:16:27'), + ('blue',33,'Juan','jgordonw@sciencedirect.com','''71.77.2.200''','1987-01-31 03:46:44'), + ('blue',34,'Diane','dhowellx@nyu.edu','''140.94.133.12''','1994-06-11 02:30:05'), + ('blue',35,'Randy','rkennedyy@microsoft.com','''73.255.34.196''','2005-05-26 20:28:39'), + ('blue',36,'Janice','jriveraz@time.com','''22.214.227.32''','1990-02-09 04:16:52'), + ('blue',37,'Laura','lperry10@diigo.com','''159.148.145.73''','2015-03-17 05:59:25'), + ('blue',38,'Gary','gray11@statcounter.com','''40.193.124.56''','1970-01-27 10:04:51'), + ('blue',39,'Jesse','jmcdonald12@typepad.com','''31.7.86.103''','2009-03-14 08:14:29'), + ('blue',40,'Sandra','sgonzalez13@goodreads.com','''223.80.168.239''','1993-05-21 14:08:54'), + ('blue',41,'Scott','smoore14@archive.org','''38.238.46.83''','1980-08-30 11:16:56'), + ('blue',42,'Phillip','pevans15@cisco.com','''158.234.59.34''','2011-12-15 23:26:31'), + ('blue',43,'Steven','sriley16@google.ca','''90.247.57.68''','2011-10-29 19:03:28'), + ('blue',44,'Deborah','dbrown17@hexun.com','''179.125.143.240''','1995-04-10 14:36:07'), + ('blue',45,'Lori','lross18@ow.ly','''64.80.162.180''','1980-12-27 16:49:15'), + ('blue',46,'Sean','sjackson19@tumblr.com','''240.116.183.69''','1988-06-12 21:24:45'), + ('blue',47,'Terry','tbarnes1a@163.com','''118.38.213.137''','1997-09-22 16:43:19'), + ('blue',48,'Dorothy','dross1b@ebay.com','''116.81.76.49''','2005-02-28 13:33:24'), + ('blue',49,'Samuel','swashington1c@house.gov','''38.191.253.40''','1989-01-19 21:15:48'), + ('blue',50,'Ralph','rcarter1d@tinyurl.com','''104.84.60.174''','2007-08-11 10:21:49'), + ('green',51,'Wayne','whudson1e@princeton.edu','''90.61.24.102''','1983-07-03 16:58:12'), + ('green',52,'Rose','rjames1f@plala.or.jp','''240.83.81.10''','1995-06-08 11:46:23'), + ('green',53,'Louise','lcox1g@theglobeandmail.com','''105.11.82.145''','2016-09-19 14:45:51'), + ('green',54,'Kenneth','kjohnson1h@independent.co.uk','''139.5.45.94''','1976-08-17 11:26:19'), + ('green',55,'Donna','dbrown1i@amazon.co.uk','''19.45.169.45''','2006-05-27 16:51:40'), + ('green',56,'Johnny','jvasquez1j@trellian.com','''118.202.238.23''','1975-11-17 08:42:32'), + ('green',57,'Patrick','pramirez1k@tamu.edu','''231.25.153.198''','1997-08-06 11:51:09'), + ('green',58,'Helen','hlarson1l@prweb.com','''8.40.21.39''','1993-08-04 19:53:40'), + ('green',59,'Patricia','pspencer1m@gmpg.org','''212.198.40.15''','1977-08-03 16:37:27'), + ('green',60,'Joseph','jspencer1n@marriott.com','''13.15.63.238''','2005-07-23 20:22:06'), + ('green',61,'Phillip','pschmidt1o@blogtalkradio.com','''177.98.201.190''','1976-05-19 21:47:44'), + ('green',62,'Joan','jwebb1p@google.ru','''105.229.170.71''','1972-09-07 17:53:47'), + ('green',63,'Phyllis','pkennedy1q@imgur.com','''35.145.8.244''','2000-01-01 22:33:37'), + ('green',64,'Katherine','khunter1r@smh.com.au','''248.168.205.32''','1991-01-09 06:40:24'), + ('green',65,'Laura','lvasquez1s@wiley.com','''128.129.115.152''','1997-10-23 12:04:56'), + ('green',66,'Juan','jdunn1t@state.gov','''44.228.124.51''','2004-11-10 05:07:35'), + ('green',67,'Judith','jholmes1u@wiley.com','''40.227.179.115''','1977-08-02 17:01:45'), + ('green',68,'Beverly','bbaker1v@wufoo.com','''208.34.84.59''','2016-03-06 20:07:23'), + ('green',69,'Lawrence','lcarr1w@flickr.com','''59.158.212.223''','1988-09-13 06:07:21'), + ('green',70,'Gloria','gwilliams1x@mtv.com','''245.231.88.33''','1995-03-18 22:32:46'), + ('green',71,'Steven','ssims1y@cbslocal.com','''104.50.58.255''','2001-08-05 21:26:20'), + ('green',72,'Betty','bmills1z@arstechnica.com','''103.177.214.220''','1981-12-14 21:26:54'), + ('green',73,'Mildred','mfuller20@prnewswire.com','''151.158.8.130''','2000-04-19 10:13:55'), + ('green',74,'Donald','dday21@icq.com','''9.178.102.255''','1972-12-03 00:58:24'), + ('green',75,'Eric','ethomas22@addtoany.com','''85.2.241.227''','1992-11-01 05:59:30'), + ('green',76,'Joyce','jarmstrong23@sitemeter.com','''169.224.20.36''','1985-10-24 06:50:01'), + ('green',77,'Maria','mmartinez24@amazonaws.com','''143.189.167.135''','2005-10-05 05:17:42'), + ('green',78,'Harry','hburton25@youtube.com','''156.47.176.237''','1978-03-26 05:53:33'), + ('green',79,'Kevin','klawrence26@hao123.com','''79.136.183.83''','1994-10-12 04:38:52'), + ('green',80,'David','dhall27@prweb.com','''133.149.172.153''','1976-12-15 16:24:24'), + ('green',81,'Kathy','kperry28@twitter.com','''229.242.72.228''','1979-03-04 02:58:56'), + ('green',82,'Adam','aprice29@elegantthemes.com','''13.145.21.10''','1982-11-07 11:46:59'), + ('green',83,'Brandon','bgriffin2a@va.gov','''73.249.128.212''','2013-10-30 05:30:36'), + ('green',84,'Henry','hnguyen2b@discovery.com','''211.36.214.242''','1985-01-09 06:37:27'), + ('green',85,'Eric','esanchez2c@edublogs.org','''191.166.188.251''','2004-05-01 23:21:42'), + ('green',86,'Jason','jlee2d@jimdo.com','''193.92.16.182''','1973-01-08 09:05:39'), + ('green',87,'Diana','drichards2e@istockphoto.com','''19.130.175.245''','1994-10-05 22:50:49'), + ('green',88,'Andrea','awelch2f@abc.net.au','''94.155.233.96''','2002-04-26 08:41:44'), + ('green',89,'Louis','lwagner2g@miitbeian.gov.cn','''26.217.34.111''','2003-08-25 07:56:39'), + ('green',90,'Jane','jsims2h@seesaa.net','''43.4.220.135''','1987-03-20 20:39:04'), + ('green',91,'Larry','lgrant2i@si.edu','''97.126.79.34''','2000-09-07 20:26:19'), + ('green',92,'Louis','ldean2j@prnewswire.com','''37.148.40.127''','2011-09-16 20:12:14'), + ('green',93,'Jennifer','jcampbell2k@xing.com','''38.106.254.142''','1988-07-15 05:06:49'), + ('green',94,'Wayne','wcunningham2l@google.com.hk','''223.28.26.187''','2009-12-15 06:16:54'), + ('green',95,'Lori','lstevens2m@icq.com','''181.250.181.58''','1984-10-28 03:29:19'), + ('green',96,'Judy','jsimpson2n@marriott.com','''180.121.239.219''','1986-02-07 15:18:10'), + ('green',97,'Phillip','phoward2o@usa.gov','''255.247.0.175''','2002-12-26 08:44:45'), + ('green',98,'Gloria','gwalker2p@usa.gov','''156.140.7.128''','1997-10-04 07:58:58'), + ('green',99,'Paul','pjohnson2q@umn.edu','''183.59.198.197''','1991-11-14 12:33:55'), + ('green',100,'Frank','fgreene2r@blogspot.com','''150.143.68.121''','2010-06-12 23:55:39'); diff --git a/tests/functional/sources/fixtures.py b/tests/functional/sources/fixtures.py new file mode 100644 index 000000000..b40b18695 --- /dev/null +++ b/tests/functional/sources/fixtures.py @@ -0,0 +1,474 @@ +error_models_schema_yml = """version: 2 +sources: + - name: test_source + loader: custom + freshness: + warn_after: {count: 10, period: hour} + error_after: {count: 1, period: day} + schema: invalid + tables: + - name: test_table + identifier: source + loaded_at_field: updated_at +""" + +error_models_model_sql = """select * from {{ source('test_source', 'test_table') }} +""" + +override_freshness_models_schema_yml = """version: 2 +sources: + - name: test_source + loader: custom + freshness: # default freshness + warn_after: {count: 12, period: hour} + error_after: {count: 24, period: hour} + schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" + loaded_at_field: loaded_at + quoting: + identifier: True + tags: + - my_test_source_tag + tables: + - name: source_a + identifier: source + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + freshness: + warn_after: {count: 6, period: hour} + # use the default error_after defined above + - name: source_b + identifier: source + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + freshness: + warn_after: {count: 6, period: hour} + error_after: {} # use the default error_after defined above + - name: source_c + identifier: source + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + freshness: + warn_after: {count: 6, period: hour} + error_after: null # override: disable error_after for this table + - name: source_d + identifier: source + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + freshness: + warn_after: {count: 6, period: hour} + error_after: {count: 72, period: hour} # override: use this new behavior instead of error_after defined above + - name: source_e + identifier: source + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + freshness: null # override: disable freshness for this table +""" + +models_schema_yml = """version: 2 +models: + - name: descendant_model + columns: + - name: favorite_color + data_tests: + - relationships: + to: source('test_source', 'test_table') + field: favorite_color + - name: id + data_tests: + - unique + - not_null + +sources: + - name: test_source + loader: custom + freshness: + warn_after: {count: 10, period: hour} + error_after: {count: 1, period: day} + schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" + quoting: + identifier: True + tags: + - my_test_source_tag + tables: + - name: test_table + identifier: source + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + freshness: + error_after: {count: 18, period: hour} + tags: + - my_test_source_table_tag + columns: + - name: favorite_color + description: The favorite color + - name: id + description: The user ID + data_tests: + - unique + - not_null + tags: + - id_column + - name: first_name + description: The first name of the user + data_tests: [] + - name: email + description: The email address of the user + - name: ip_address + description: The last IP address the user logged in from + - name: updated_at + description: The last update time for this user + data_tests: + - relationships: + # do this as a table-level test, just to test out that aspect + column_name: favorite_color + to: ref('descendant_model') + field: favorite_color + - name: other_test_table + identifier: other_table + freshness: null + columns: + - name: id + data_tests: + - not_null + - unique + tags: + - id_column + - name: disabled_test_table + freshness: null + loaded_at_field: "{{ var('test_loaded_at') | as_text }}" + - name: other_source + schema: "{{ var('test_run_schema') }}" + quoting: + identifier: True + tables: + - name: test_table + identifier: other_source_table + - name: external_source + schema: "{{ var('test_run_alt_schema', var('test_run_schema')) }}" + tables: + - name: table +""" + +models_view_model_sql = """{# See here: https://github.com/dbt-labs/dbt-core/pull/1729 #} + +select * from {{ ref('ephemeral_model') }} +""" + +models_ephemeral_model_sql = """{{ config(materialized='ephemeral') }} + +select 1 as id +""" + +models_descendant_model_sql = """select * from {{ source('test_source', 'test_table') }} +""" + +models_multi_source_model_sql = """select * from {{ source('test_source', 'other_test_table')}} + join {{ source('other_source', 'test_table')}} using (id) +""" + +models_nonsource_descendant_sql = """select * from {{ schema }}.source +""" + +models_newly_added_model_sql = """select 2 as id""" + +models_newly_added_error_model_sql = """select error from fake_table""" + +malformed_models_schema_yml = """version: 2 +sources: + - name: test_source + loader: custom + schema: "{{ var('test_run_schema') }}" + tables: + - name: test_table + identifier: source + data_tests: + - relationships: + # this is invalid (list of 3 1-key dicts instead of a single 3-key dict) + - column_name: favorite_color + - to: ref('descendant_model') + - field: favorite_color +""" + +malformed_models_descendant_model_sql = """select * from {{ source('test_source', 'test_table') }} +""" + +filtered_models_schema_yml = """version: 2 +sources: + - name: test_source + loader: custom + freshness: + warn_after: {count: 10, period: hour} + error_after: {count: 1, period: day} + filter: id > 1 + schema: "{{ var(env_var('DBT_TEST_SCHEMA_NAME_VARIABLE')) }}" + quoting: + identifier: True + tables: + - name: test_table + identifier: source + loaded_at_field: updated_at + freshness: + error_after: {count: 18, period: hour} + filter: id > 101 +""" + +macros_macro_sql = """{% macro override_me() -%} + {{ exceptions.raise_compiler_error('this is a bad macro') }} +{%- endmacro %} + +{% macro happy_little_macro() -%} + {{ override_me() }} +{%- endmacro %} + + +{% macro vacuum_source(source_name, table_name) -%} + {% call statement('stmt', auto_begin=false, fetch_result=false) %} + vacuum {{ source(source_name, table_name) }} + {% endcall %} +{%- endmacro %} +""" + +seeds_source_csv = """favorite_color,id,first_name,email,ip_address,updated_at +blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 +blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 +blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 +blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 +blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 +blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 +blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 +blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 +blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 +blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 +blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 +blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 +blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 +blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 +blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 +blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 +blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 +blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 +blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 +blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 +blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 +blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 +blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 +blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 +blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 +blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 +blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 +blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 +blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 +blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 +blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 +blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 +blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 +blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 +blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 +blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 +blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 +blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 +blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 +blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 +blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 +blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 +blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 +blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 +blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 +blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 +blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 +blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 +blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 +blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 +green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12 +green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23 +green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51 +green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19 +green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40 +green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32 +green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09 +green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40 +green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27 +green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06 +green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44 +green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47 +green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37 +green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24 +green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56 +green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35 +green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45 +green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23 +green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21 +green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46 +green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20 +green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54 +green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55 +green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24 +green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30 +green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01 +green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42 +green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33 +green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52 +green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24 +green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56 +green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59 +green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36 +green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27 +green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42 +green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39 +green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49 +green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44 +green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39 +green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04 +green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19 +green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14 +green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49 +green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54 +green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19 +green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10 +green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45 +green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58 +green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55 +green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39 +""" + +seeds_other_table_csv = """id,first_name +1,Larry +2,Curly +3,Moe +""" + +seeds_expected_multi_source_csv = """id,first_name,color +1,Larry,blue +2,Curly,red +3,Moe,green +""" + +seeds_other_source_table_csv = """id,color +1,blue +2,red +3,green +""" + +malformed_schema_tests_schema_yml = """version: 2 +sources: + - name: test_source + schema: "{{ var('test_run_schema') }}" + tables: + - name: test_table + identifier: source + columns: + - name: favorite_color + data_tests: + - relationships: + to: ref('model') + # this will get rendered as its literal + field: "{{ 'favorite' ~ 'color' }}" +""" + +malformed_schema_tests_model_sql = """select * from {{ source('test_source', 'test_table') }} +""" + +basic_source_schema_yml = """version: 2 + +sources: + - name: test_source + tables: + - name: test_table + - name: other_source + tables: + - name: test_table +""" + +disabled_source_level_schema_yml = """version: 2 + +sources: + - name: test_source + config: + enabled: False + tables: + - name: test_table + - name: disabled_test_table +""" + +disabled_source_table_schema_yml = """version: 2 + +sources: + - name: test_source + tables: + - name: test_table + - name: disabled_test_table + config: + enabled: False +""" + +all_configs_everywhere_schema_yml = """version: 2 + +sources: + - name: test_source + config: + enabled: False + tables: + - name: test_table + config: + enabled: True + - name: other_test_table +""" + +all_configs_not_table_schema_yml = """version: 2 + +sources: + - name: test_source + config: + enabled: True + tables: + - name: test_table + - name: other_test_table +""" + +all_configs_project_source_schema_yml = """version: 2 + +sources: + - name: test_source + tables: + - name: test_table + config: + enabled: True + - name: other_test_table +""" + +invalid_config_source_schema_yml = """version: 2 + +sources: + - name: test_source + tables: + - name: test_table + config: + enabled: True and False + - name: other_test_table +""" + + +collect_freshness_macro_override_previous_return_signature = """ +{% macro collect_freshness(source, loaded_at_field, filter) %} + {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%} + select + max({{ loaded_at_field }}) as max_loaded_at, + {{ current_timestamp() }} as snapshotted_at + from {{ source }} + {% if filter %} + where {{ filter }} + {% endif %} + {% endcall %} + {{ return(load_result('collect_freshness').table) }} +{% endmacro %} +""" + + +freshness_via_metadata_schema_yml = """version: 2 +sources: + - name: test_source + loader: custom + freshness: + warn_after: {count: 10, period: hour} + error_after: {count: 1, period: day} + schema: my_schema + quoting: + identifier: True + tables: + - name: test_table + identifier: source +""" diff --git a/tests/functional/sources/test_simple_source.py b/tests/functional/sources/test_simple_source.py new file mode 100644 index 000000000..9aad0fee9 --- /dev/null +++ b/tests/functional/sources/test_simple_source.py @@ -0,0 +1,196 @@ +import os + +from dbt.tests.util import ( + check_relations_equal, + check_table_does_not_exist, + run_dbt, + update_config_file, +) +from dbt.exceptions import ParsingError +import pytest +import yaml + +from tests.functional.sources.common_source_setup import BaseSourcesTest +from tests.functional.sources.fixtures import ( + macros_macro_sql, + malformed_models_descendant_model_sql, + malformed_models_schema_yml, + malformed_schema_tests_schema_yml, + malformed_schema_tests_model_sql, +) + + +class SuccessfulSourcesTest(BaseSourcesTest): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + self.run_dbt_with_vars(project, ["seed"]) + os.environ["DBT_ENV_CUSTOM_ENV_key"] = "value" + + yield + + del os.environ["DBT_ENV_CUSTOM_ENV_key"] + + @pytest.fixture(scope="class") + def macros(self): + return {"macro.sql": macros_macro_sql} + + def _create_schemas(self, project): + schema = self.alternative_schema(project.test_schema) + project.run_sql(f"drop schema if exists {schema} cascade") + project.run_sql(f"create schema {schema}") + + def alternative_schema(self, test_schema): + return test_schema + "_other" + + @pytest.fixture(scope="class", autouse=True) + def createDummyTables(self, project): + self._create_schemas(project) + project.run_sql("create table {}.dummy_table (id int)".format(project.test_schema)) + project.run_sql( + "create view {}.external_view as (select * from {}.dummy_table)".format( + self.alternative_schema(project.test_schema), project.test_schema + ) + ) + + def run_dbt_with_vars(self, project, cmd, *args, **kwargs): + vars_dict = { + "test_run_schema": project.test_schema, + "test_run_alt_schema": self.alternative_schema(project.test_schema), + "test_loaded_at": project.adapter.quote("updated_at"), + } + cmd.extend(["--vars", yaml.safe_dump(vars_dict)]) + return run_dbt(cmd, *args, **kwargs) + + +class TestBasicSource(SuccessfulSourcesTest): + def test_basic_source_def(self, project): + results = self.run_dbt_with_vars(project, ["run"]) + assert len(results) == 4 + + check_relations_equal( + project.adapter, ["source", "descendant_model", "nonsource_descendant"] + ) + check_relations_equal(project.adapter, ["expected_multi_source", "multi_source_model"]) + results = self.run_dbt_with_vars(project, ["test"]) + assert len(results) == 8 + + +class TestSourceSelector(SuccessfulSourcesTest): + def test_source_selector(self, project): + # only one of our models explicitly depends upon a source + results = self.run_dbt_with_vars( + project, ["run", "--models", "source:test_source.test_table+"] + ) + assert len(results) == 1 + check_relations_equal(project.adapter, ["source", "descendant_model"]) + check_table_does_not_exist(project.adapter, "nonsource_descendant") + check_table_does_not_exist(project.adapter, "multi_source_model") + + # do the same thing, but with tags + results = self.run_dbt_with_vars( + project, ["run", "--models", "tag:my_test_source_table_tag+"] + ) + assert len(results) == 1 + + results = self.run_dbt_with_vars( + project, ["test", "--models", "source:test_source.test_table+"] + ) + assert len(results) == 6 + + results = self.run_dbt_with_vars( + project, ["test", "--models", "tag:my_test_source_table_tag+"] + ) + assert len(results) == 6 + + results = self.run_dbt_with_vars(project, ["test", "--models", "tag:my_test_source_tag+"]) + # test_table + other_test_table + assert len(results) == 8 + + results = self.run_dbt_with_vars(project, ["test", "--models", "tag:id_column"]) + # all 4 id column tests + assert len(results) == 4 + + +class TestEmptySource(SuccessfulSourcesTest): + def test_empty_source_def(self, project): + # sources themselves can never be selected, so nothing should be run + results = self.run_dbt_with_vars( + project, ["run", "--models", "source:test_source.test_table"] + ) + check_table_does_not_exist(project.adapter, "nonsource_descendant") + check_table_does_not_exist(project.adapter, "multi_source_model") + check_table_does_not_exist(project.adapter, "descendant_model") + assert len(results) == 0 + + +class TestSourceDef(SuccessfulSourcesTest): + def test_source_only_def(self, project): + results = self.run_dbt_with_vars(project, ["run", "--models", "source:other_source+"]) + assert len(results) == 1 + check_relations_equal(project.adapter, ["expected_multi_source", "multi_source_model"]) + check_table_does_not_exist(project.adapter, "nonsource_descendant") + check_table_does_not_exist(project.adapter, "descendant_model") + + results = self.run_dbt_with_vars(project, ["run", "--models", "source:test_source+"]) + assert len(results) == 2 + check_relations_equal(project.adapter, ["source", "descendant_model"]) + check_relations_equal(project.adapter, ["expected_multi_source", "multi_source_model"]) + check_table_does_not_exist(project.adapter, "nonsource_descendant") + + +class TestSourceChildrenParents(SuccessfulSourcesTest): + def test_source_childrens_parents(self, project): + results = self.run_dbt_with_vars(project, ["run", "--models", "@source:test_source"]) + assert len(results) == 2 + check_relations_equal(project.adapter, ["source", "descendant_model"]) + check_relations_equal(project.adapter, ["expected_multi_source", "multi_source_model"]) + check_table_does_not_exist(project.adapter, "nonsource_descendant") + + +class TestSourceRunOperation(SuccessfulSourcesTest): + def test_run_operation_source(self, project): + kwargs = '{"source_name": "test_source", "table_name": "test_table"}' + self.run_dbt_with_vars(project, ["run-operation", "vacuum_source", "--args", kwargs]) + + +class TestMalformedSources(BaseSourcesTest): + # even seeds should fail, because parsing is what's raising + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": malformed_models_schema_yml, + "descendant_model.sql": malformed_models_descendant_model_sql, + } + + def test_malformed_schema_will_break_run(self, project): + with pytest.raises(ParsingError): + self.run_dbt_with_vars(project, ["seed"]) + + +class TestRenderingInSourceTests(BaseSourcesTest): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": malformed_schema_tests_schema_yml, + "model.sql": malformed_schema_tests_model_sql, + } + + def test_render_in_source_tests(self, project): + self.run_dbt_with_vars(project, ["seed"]) + self.run_dbt_with_vars(project, ["run"]) + # syntax error at or near "{", because the test isn't rendered + self.run_dbt_with_vars(project, ["test"], expect_pass=False) + + +class TestUnquotedSources(SuccessfulSourcesTest): + def test_catalog(self, project): + new_quoting_config = { + "quoting": { + "identifier": False, + "schema": False, + "database": False, + } + } + update_config_file(new_quoting_config, project.project_root, "dbt_project.yml") + self.run_dbt_with_vars(project, ["run"]) + self.run_dbt_with_vars(project, ["docs", "generate"]) diff --git a/tests/functional/sources/test_source_configs.py b/tests/functional/sources/test_source_configs.py new file mode 100644 index 000000000..1516fd432 --- /dev/null +++ b/tests/functional/sources/test_source_configs.py @@ -0,0 +1,181 @@ +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt.contracts.graph.model_config import SourceConfig +from dbt_common.dataclass_schema import ValidationError +import pytest + +from tests.functional.sources.fixtures import ( + all_configs_everywhere_schema_yml, + all_configs_not_table_schema_yml, + all_configs_project_source_schema_yml, + basic_source_schema_yml, + disabled_source_level_schema_yml, + disabled_source_table_schema_yml, + invalid_config_source_schema_yml, +) + + +class SourceConfigTests: + @pytest.fixture(scope="class", autouse=True) + def setUp(self): + pytest.expected_config = SourceConfig( + enabled=True, + ) + + +# Test enabled config in dbt_project.yml +# expect pass, already implemented +class TestSourceEnabledConfigProjectLevel(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": basic_source_schema_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "sources": { + "test": { + "test_source": { + "enabled": True, + }, + } + } + } + + def test_enabled_source_config_dbt_project(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.test_table" in manifest.sources + + new_enabled_config = { + "sources": { + "test": { + "test_source": { + "enabled": False, + }, + } + } + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert ( + "source.test.test_source.test_table" not in manifest.sources + ) # or should it be there with enabled: false?? + assert "source.test.other_source.test_table" in manifest.sources + + +# Test enabled config at sources level in yml file +class TestConfigYamlSourceLevel(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": disabled_source_level_schema_yml, + } + + def test_source_config_yaml_source_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.test_table" not in manifest.sources + assert "source.test.test_source.disabled_test_table" not in manifest.sources + + +# Test enabled config at source table level in yaml file +class TestConfigYamlSourceTable(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": disabled_source_table_schema_yml, + } + + def test_source_config_yaml_source_table(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.test_table" in manifest.sources + assert "source.test.test_source.disabled_test_table" not in manifest.sources + + +# Test inheritence - set configs at project, source, and source-table level - expect source-table level to win +class TestSourceConfigsInheritence1(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": all_configs_everywhere_schema_yml} + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"sources": {"enabled": True}} + + def test_source_all_configs_source_table(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.test_table" in manifest.sources + assert "source.test.test_source.other_test_table" not in manifest.sources + config_test_table = manifest.sources.get("source.test.test_source.test_table").config + + assert isinstance(config_test_table, SourceConfig) + assert config_test_table == pytest.expected_config + + +# Test inheritence - set configs at project and source level - expect source level to win +class TestSourceConfigsInheritence2(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": all_configs_not_table_schema_yml} + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"sources": {"enabled": False}} + + def test_source_two_configs_source_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.test_table" in manifest.sources + assert "source.test.test_source.other_test_table" in manifest.sources + config_test_table = manifest.sources.get("source.test.test_source.test_table").config + config_other_test_table = manifest.sources.get( + "source.test.test_source.other_test_table" + ).config + + assert isinstance(config_test_table, SourceConfig) + assert isinstance(config_other_test_table, SourceConfig) + + assert config_test_table == config_other_test_table + assert config_test_table == pytest.expected_config + + +# Test inheritence - set configs at project and source-table level - expect source-table level to win +class TestSourceConfigsInheritence3(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": all_configs_project_source_schema_yml} + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"sources": {"enabled": False}} + + def test_source_two_configs_source_table(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.test_table" in manifest.sources + assert "source.test.test_source.other_test_table" not in manifest.sources + config_test_table = manifest.sources.get("source.test.test_source.test_table").config + + assert isinstance(config_test_table, SourceConfig) + assert config_test_table == pytest.expected_config + + +# Test invalid source configs +class TestInvalidSourceConfig(SourceConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": invalid_config_source_schema_yml, + } + + def test_invalid_config_source(self, project): + with pytest.raises(ValidationError) as excinfo: + run_dbt(["parse"]) + expected_msg = "'True and False' is not of type 'boolean'" + assert expected_msg in str(excinfo.value) diff --git a/tests/functional/sources/test_source_fresher_state.py b/tests/functional/sources/test_source_fresher_state.py new file mode 100644 index 000000000..1b885b09e --- /dev/null +++ b/tests/functional/sources/test_source_fresher_state.py @@ -0,0 +1,705 @@ +from datetime import datetime, timedelta +import json +import os +import shutil + +from dbt.contracts.results import FreshnessExecutionResultArtifact +from dbt.tests.util import AnyStringWith, AnyFloat +from dbt_common.exceptions import DbtInternalError +import pytest + +from dbt.adapters.__about__ import version as DBT_POSTGRES_VERSION +from tests.functional.sources.common_source_setup import BaseSourcesTest +from tests.functional.sources.fixtures import ( + error_models_schema_yml, + models_newly_added_error_model_sql, + models_newly_added_model_sql, +) + + +# TODO: We may create utility classes to handle reusable fixtures. +def copy_to_previous_state(): + shutil.copyfile("target/manifest.json", "previous_state/manifest.json") + shutil.copyfile("target/run_results.json", "previous_state/run_results.json") + + +class SuccessfulSourceFreshnessTest(BaseSourcesTest): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + self.run_dbt_with_vars(project, ["seed"]) + pytest._id = 101 + pytest.freshness_start_time = datetime.utcnow() + # this is the db initial value + pytest.last_inserted_time = "2016-09-19T14:45:51+00:00" + + os.environ["DBT_ENV_CUSTOM_ENV_key"] = "value" + + yield + + del os.environ["DBT_ENV_CUSTOM_ENV_key"] + + def _set_updated_at_to(self, project, delta): + insert_time = datetime.utcnow() + delta + timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S") + # favorite_color,id,first_name,email,ip_address,updated_at + insert_id = pytest._id + pytest._id += 1 + quoted_columns = ",".join( + project.adapter.quote(c) + for c in ("favorite_color", "id", "first_name", "email", "ip_address", "updated_at") + ) + kwargs = { + "schema": project.test_schema, + "time": timestr, + "id": insert_id, + "source": project.adapter.quote("source"), + "quoted_columns": quoted_columns, + } + raw_code = """INSERT INTO {schema}.{source} + ({quoted_columns}) + VALUES ( + 'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}' + )""".format( + **kwargs + ) + project.run_sql(raw_code) + pytest.last_inserted_time = insert_time.strftime("%Y-%m-%dT%H:%M:%S+00:00") + + def assertBetween(self, timestr, start, end=None): + datefmt = "%Y-%m-%dT%H:%M:%S.%fZ" + if end is None: + end = datetime.utcnow() + + parsed = datetime.strptime(timestr, datefmt) + + assert start <= parsed + assert end >= parsed + + def _assert_freshness_results(self, path, state): + assert os.path.exists(path) + with open(path) as fp: + data = json.load(fp) + + try: + FreshnessExecutionResultArtifact.validate(data) + except Exception: + raise pytest.fail("FreshnessExecutionResultArtifact did not validate") + assert set(data) == {"metadata", "results", "elapsed_time"} + assert "generated_at" in data["metadata"] + assert isinstance(data["elapsed_time"], float) + self.assertBetween(data["metadata"]["generated_at"], pytest.freshness_start_time) + assert ( + data["metadata"]["dbt_schema_version"] + == "https://schemas.getdbt.com/dbt/sources/v3.json" + ) + assert data["metadata"]["dbt_version"] == DBT_POSTGRES_VERSION + key = "key" + if os.name == "nt": + key = key.upper() + assert data["metadata"]["env"] == {key: "value"} + + last_inserted_time = pytest.last_inserted_time + + assert len(data["results"]) == 1 + + assert data["results"] == [ + { + "unique_id": "source.test.test_source.test_table", + "max_loaded_at": last_inserted_time, + "snapshotted_at": AnyStringWith(), + "max_loaded_at_time_ago_in_s": AnyFloat(), + "status": state, + "criteria": { + "filter": None, + "warn_after": {"count": 10, "period": "hour"}, + "error_after": {"count": 18, "period": "hour"}, + }, + "adapter_response": {"_message": "SELECT 1", "code": "SELECT", "rows_affected": 1}, + "thread_id": AnyStringWith("Thread-"), + "execution_time": AnyFloat(), + "timing": [ + { + "name": "compile", + "started_at": AnyStringWith(), + "completed_at": AnyStringWith(), + }, + { + "name": "execute", + "started_at": AnyStringWith(), + "completed_at": AnyStringWith(), + }, + ], + } + ] + + +class TestSourceFresherNothingToDo(SuccessfulSourceFreshnessTest): + def test_source_fresher_nothing_to_do(self, project): + self.run_dbt_with_vars(project, ["run"]) + self._set_updated_at_to(project, timedelta(hours=-2)) + previous_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "previous_state/sources.json"] + ) + self._assert_freshness_results("previous_state/sources.json", "pass") + copy_to_previous_state() + + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "pass") + + assert previous_state_results[0].max_loaded_at == current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher+", "--defer", "--state", "./previous_state"], + ) + assert source_fresher_results.results == [] + + +class TestSourceFresherRun(SuccessfulSourceFreshnessTest): + def test_source_fresher_run_error(self, project): + self.run_dbt_with_vars(project, ["run"]) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("previous_state/sources.json", "error") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-20)) + current_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("target/sources.json", "error") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + assert source_fresher_results.results == [] + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == {"descendant_model"} + + def test_source_fresher_run_warn(self, project): + self.run_dbt_with_vars(project, ["run"]) + self._set_updated_at_to(project, timedelta(hours=-17)) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=True, + ) + self._assert_freshness_results("previous_state/sources.json", "warn") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-11)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "warn") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + assert source_fresher_results.results == [] + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == {"descendant_model"} + + def test_source_fresher_run_pass(self, project): + self.run_dbt_with_vars(project, ["run"]) + self._set_updated_at_to(project, timedelta(hours=-2)) + previous_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "previous_state/sources.json"] + ) + self._assert_freshness_results("previous_state/sources.json", "pass") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-1)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "pass") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + assert source_fresher_results.results == [] + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == {"descendant_model"} + + +class TestSourceFresherBuildStateModified(SuccessfulSourceFreshnessTest): + def test_source_fresher_build_state_modified_pass(self, project, project_root): + self.run_dbt_with_vars(project, ["run"]) + + self._set_updated_at_to(project, timedelta(hours=-2)) + previous_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "previous_state/sources.json"] + ) + self._assert_freshness_results("previous_state/sources.json", "pass") + + self._set_updated_at_to(project, timedelta(hours=-1)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "pass") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + models_path = project_root.join("models/") + assert os.path.exists(models_path) + with open(f"{models_path}/newly_added_model.sql", "w") as fp: + fp.write(models_newly_added_model_sql) + + copy_to_previous_state() + state_modified_results = self.run_dbt_with_vars( + project, + [ + "build", + "--select", + "source_status:fresher+", + "state:modified+", + "--defer", + "--state", + "previous_state", + ], + ) + nodes = set([elem.node.name for elem in state_modified_results]) + assert nodes == { + "newly_added_model", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + "source_not_null_test_source_test_table_id", + "descendant_model", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + } + + +class TestSourceFresherRuntimeError(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": error_models_schema_yml, + } + + def test_runtime_error_states(self, project): + self.run_dbt_with_vars(project, ["run"]) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=False, + ) + assert len(previous_state_results) == 1 + assert previous_state_results[0].status == "runtime error" + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-1)) + current_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/sources.json"], + expect_pass=False, + ) + assert len(current_state_results) == 1 + assert current_state_results[0].status == "runtime error" + + assert not hasattr(previous_state_results[0], "max_loaded_at") + assert not hasattr(current_state_results[0], "max_loaded_at") + + source_fresher_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + assert source_fresher_results.results == [] + + +class TestSourceFresherTest(SuccessfulSourceFreshnessTest): + def test_source_fresher_run_error(self, project): + self.run_dbt_with_vars(project, ["run"]) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("previous_state/sources.json", "error") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-20)) + current_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("target/sources.json", "error") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + } + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + } + + def test_source_fresher_test_warn(self, project): + self.run_dbt_with_vars(project, ["run"]) + self._set_updated_at_to(project, timedelta(hours=-17)) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=True, + ) + self._assert_freshness_results("previous_state/sources.json", "warn") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-11)) + current_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/sources.json"], + expect_pass=True, + ) + self._assert_freshness_results("target/sources.json", "warn") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + } + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + } + + def test_source_fresher_test_pass(self, project): + self.run_dbt_with_vars(project, ["run"]) + self._set_updated_at_to(project, timedelta(hours=-2)) + previous_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "previous_state/sources.json"] + ) + self._assert_freshness_results("previous_state/sources.json", "pass") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-1)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "pass") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + } + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["test", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + } + + +class TestSourceFresherBuild(SuccessfulSourceFreshnessTest): + def test_source_fresher_build_error(self, project): + self.run_dbt_with_vars(project, ["build"]) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("previous_state/sources.json", "error") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-20)) + current_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("target/sources.json", "error") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["build", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + } + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["build", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == { + "descendant_model", + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + } + + def test_source_fresher_build_warn(self, project): + self.run_dbt_with_vars(project, ["build"]) + self._set_updated_at_to(project, timedelta(hours=-17)) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=True, + ) + self._assert_freshness_results("previous_state/sources.json", "warn") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-11)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "warn") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["build", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + } + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["build", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == { + "descendant_model", + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + } + + def test_source_fresher_build_pass(self, project): + self.run_dbt_with_vars(project, ["build"]) + self._set_updated_at_to(project, timedelta(hours=-2)) + previous_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "previous_state/sources.json"] + ) + self._assert_freshness_results("previous_state/sources.json", "pass") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-1)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + self._assert_freshness_results("target/sources.json", "pass") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + source_fresher_results = self.run_dbt_with_vars( + project, + ["build", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_results]) + assert nodes == { + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + } + + source_fresher_plus_results = self.run_dbt_with_vars( + project, + ["build", "-s", "source_status:fresher+", "--defer", "--state", "previous_state"], + ) + nodes = set([elem.node.name for elem in source_fresher_plus_results]) + assert nodes == { + "descendant_model", + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + "source_not_null_test_source_test_table_id", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + } + + +class TestSourceFresherNoPreviousState(SuccessfulSourceFreshnessTest): + def test_intentional_failure_no_previous_state(self, project): + self.run_dbt_with_vars(project, ["run"]) + # TODO add the current and previous but with previous as null + with pytest.raises(DbtInternalError) as excinfo: + self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + assert "No previous state comparison freshness results in sources.json" in str( + excinfo.value + ) + + +class TestSourceFresherNoCurrentState(SuccessfulSourceFreshnessTest): + def test_intentional_failure_no_previous_state(self, project): + self.run_dbt_with_vars(project, ["run"]) + previous_state_results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "previous_state/sources.json"], + expect_pass=False, + ) + self._assert_freshness_results("previous_state/sources.json", "error") + copy_to_previous_state() + assert previous_state_results[0].max_loaded_at is not None + + with pytest.raises(DbtInternalError) as excinfo: + self.run_dbt_with_vars( + project, + ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ) + assert "No current state comparison freshness results in sources.json" in str( + excinfo.value + ) + + +class TestSourceFresherBuildResultSelectors(SuccessfulSourceFreshnessTest): + def test_source_fresher_build_state_modified_pass(self, project, project_root): + models_path = project_root.join("models/") + assert os.path.exists(models_path) + with open(f"{models_path}/newly_added_error_model.sql", "w") as fp: + fp.write(models_newly_added_error_model_sql) + + self.run_dbt_with_vars(project, ["run"], expect_pass=False) + + self._set_updated_at_to(project, timedelta(hours=-2)) + previous_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "previous_state/sources.json"] + ) + + self._assert_freshness_results("previous_state/sources.json", "pass") + copy_to_previous_state() + + self._set_updated_at_to(project, timedelta(hours=-1)) + current_state_results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/sources.json"] + ) + + self._assert_freshness_results("target/sources.json", "pass") + + assert previous_state_results[0].max_loaded_at < current_state_results[0].max_loaded_at + + state_modified_results = self.run_dbt_with_vars( + project, + [ + "build", + "--select", + "source_status:fresher+", + "result:error+", + "--defer", + "--state", + "previous_state", + ], + expect_pass=False, + ) + nodes = set([elem.node.name for elem in state_modified_results]) + assert nodes == { + "newly_added_error_model", + "source_unique_test_source_test_table_id", + "unique_descendant_model_id", + "not_null_descendant_model_id", + "source_not_null_test_source_test_table_id", + "descendant_model", + "source_relationships_test_source_test_table_favorite_color__favorite_color__ref_descendant_model_", + "relationships_descendant_model_favorite_color__favorite_color__source_test_source_test_table_", + } diff --git a/tests/functional/sources/test_source_freshness.py b/tests/functional/sources/test_source_freshness.py new file mode 100644 index 000000000..3438832e0 --- /dev/null +++ b/tests/functional/sources/test_source_freshness.py @@ -0,0 +1,403 @@ +from datetime import datetime, timedelta +import os +import json + +from dbt.cli.main import dbtRunner +from dbt.tests.util import AnyFloat, AnyStringWith +import pytest +import yaml + +from dbt.adapters.__about__ import version as DBT_POSTGRES_VERSION +from tests.functional.sources.common_source_setup import BaseSourcesTest +from tests.functional.sources.fixtures import ( + collect_freshness_macro_override_previous_return_signature, + error_models_model_sql, + error_models_schema_yml, + filtered_models_schema_yml, + freshness_via_metadata_schema_yml, + override_freshness_models_schema_yml, +) + + +class SuccessfulSourceFreshnessTest(BaseSourcesTest): + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + self.run_dbt_with_vars(project, ["seed"]) + pytest._id = 101 + pytest.freshness_start_time = datetime.utcnow() + # this is the db initial value + pytest.last_inserted_time = "2016-09-19T14:45:51+00:00" + + os.environ["DBT_ENV_CUSTOM_ENV_key"] = "value" + + yield + + del os.environ["DBT_ENV_CUSTOM_ENV_key"] + + def _set_updated_at_to(self, project, delta): + insert_time = datetime.utcnow() + delta + timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S") + # favorite_color,id,first_name,email,ip_address,updated_at + insert_id = pytest._id + pytest._id += 1 + quoted_columns = ",".join( + project.adapter.quote(c) + for c in ("favorite_color", "id", "first_name", "email", "ip_address", "updated_at") + ) + kwargs = { + "schema": project.test_schema, + "time": timestr, + "id": insert_id, + "source": project.adapter.quote("source"), + "quoted_columns": quoted_columns, + } + raw_code = """INSERT INTO {schema}.{source} + ({quoted_columns}) + VALUES ( + 'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}' + )""".format( + **kwargs + ) + project.run_sql(raw_code) + pytest.last_inserted_time = insert_time.strftime("%Y-%m-%dT%H:%M:%S+00:00") + + def assertBetween(self, timestr, start, end=None): + datefmt = "%Y-%m-%dT%H:%M:%S.%fZ" + if end is None: + end = datetime.utcnow() + + parsed = datetime.strptime(timestr, datefmt) + + assert start <= parsed + assert end >= parsed + + def _assert_freshness_results(self, path, state): + assert os.path.exists(path) + with open(path) as fp: + data = json.load(fp) + + assert set(data) == {"metadata", "results", "elapsed_time"} + assert "generated_at" in data["metadata"] + assert isinstance(data["elapsed_time"], float) + self.assertBetween(data["metadata"]["generated_at"], pytest.freshness_start_time) + assert ( + data["metadata"]["dbt_schema_version"] + == "https://schemas.getdbt.com/dbt/sources/v3.json" + ) + assert data["metadata"]["dbt_version"] == DBT_POSTGRES_VERSION + key = "key" + if os.name == "nt": + key = key.upper() + assert data["metadata"]["env"] == {key: "value"} + + last_inserted_time = pytest.last_inserted_time + + assert len(data["results"]) == 1 + + # TODO: replace below calls - could they be more sane? + assert data["results"] == [ + { + "unique_id": "source.test.test_source.test_table", + "max_loaded_at": last_inserted_time, + "snapshotted_at": AnyStringWith(), + "max_loaded_at_time_ago_in_s": AnyFloat(), + "status": state, + "criteria": { + "filter": None, + "warn_after": {"count": 10, "period": "hour"}, + "error_after": {"count": 18, "period": "hour"}, + }, + "adapter_response": {"_message": "SELECT 1", "code": "SELECT", "rows_affected": 1}, + "thread_id": AnyStringWith("Thread-"), + "execution_time": AnyFloat(), + "timing": [ + { + "name": "compile", + "started_at": AnyStringWith(), + "completed_at": AnyStringWith(), + }, + { + "name": "execute", + "started_at": AnyStringWith(), + "completed_at": AnyStringWith(), + }, + ], + } + ] + + +class TestSourceFreshness(SuccessfulSourceFreshnessTest): + def test_source_freshness(self, project): + # test_source.test_table should have a loaded_at field of `updated_at` + # and a freshness of warn_after: 10 hours, error_after: 18 hours + # by default, our data set is way out of date! + + results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", "target/error_source.json"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].status == "error" + self._assert_freshness_results("target/error_source.json", "error") + + self._set_updated_at_to(project, timedelta(hours=-12)) + results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/warn_source.json"], + ) + assert len(results) == 1 + assert results[0].status == "warn" + self._assert_freshness_results("target/warn_source.json", "warn") + + self._set_updated_at_to(project, timedelta(hours=-2)) + results = self.run_dbt_with_vars( + project, + ["source", "freshness", "-o", "target/pass_source.json"], + ) + assert len(results) == 1 + assert results[0].status == "pass" + self._assert_freshness_results("target/pass_source.json", "pass") + + +class TestSourceSnapshotFreshness(SuccessfulSourceFreshnessTest): + def test_source_snapshot_freshness(self, project): + """Ensures that the deprecated command `source snapshot-freshness` + aliases to `source freshness` command. + """ + results = self.run_dbt_with_vars( + project, + ["source", "snapshot-freshness", "-o", "target/error_source.json"], + expect_pass=False, + ) + assert len(results) == 1 + assert results[0].status == "error" + self._assert_freshness_results("target/error_source.json", "error") + + self._set_updated_at_to(project, timedelta(hours=-12)) + results = self.run_dbt_with_vars( + project, + ["source", "snapshot-freshness", "-o", "target/warn_source.json"], + ) + assert len(results) == 1 + assert results[0].status == "warn" + self._assert_freshness_results("target/warn_source.json", "warn") + + self._set_updated_at_to(project, timedelta(hours=-2)) + results = self.run_dbt_with_vars( + project, + ["source", "snapshot-freshness", "-o", "target/pass_source.json"], + ) + assert len(results) == 1 + assert results[0].status == "pass" + self._assert_freshness_results("target/pass_source.json", "pass") + + +class TestSourceFreshnessSelection(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def project_config_update(self, logs_dir): + return { + "target-path": logs_dir, + } + + def test_source_freshness_selection_select(self, project, logs_dir): + """Tests node selection using the --select argument.""" + """Also validate that specify a target-path works as expected.""" + self._set_updated_at_to(project, timedelta(hours=-2)) + # select source directly + results = self.run_dbt_with_vars( + project, + [ + "source", + "freshness", + "--select", + "source:test_source.test_table", + ], + ) + assert len(results) == 1 + assert results[0].status == "pass" + self._assert_freshness_results(f"{logs_dir}/sources.json", "pass") + + +class TestSourceFreshnessExclude(SuccessfulSourceFreshnessTest): + def test_source_freshness_selection_exclude(self, project): + """Tests node selection using the --select argument. It 'excludes' the + only source in the project so it should return no results.""" + self._set_updated_at_to(project, timedelta(hours=-2)) + # exclude source directly + results = self.run_dbt_with_vars( + project, + [ + "source", + "freshness", + "--exclude", + "source:test_source.test_table", + "-o", + "target/exclude_source.json", + ], + ) + assert len(results) == 0 + + +class TestSourceFreshnessGraph(SuccessfulSourceFreshnessTest): + def test_source_freshness_selection_graph_operation(self, project): + """Tests node selection using the --select argument with graph + operations. `+descendant_model` == select all nodes `descendant_model` + depends on. + """ + self._set_updated_at_to(project, timedelta(hours=-2)) + # select model ancestors + results = self.run_dbt_with_vars( + project, + [ + "source", + "freshness", + "--select", + "+descendant_model", + "-o", + "target/ancestor_source.json", + ], + ) + assert len(results) == 1 + assert results[0].status == "pass" + self._assert_freshness_results("target/ancestor_source.json", "pass") + + +class TestSourceFreshnessErrors(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": error_models_schema_yml, + "model.sql": error_models_model_sql, + } + + def test_source_freshness_error(self, project): + results = self.run_dbt_with_vars(project, ["source", "freshness"], expect_pass=False) + assert len(results) == 1 + assert results[0].status == "runtime error" + + +class TestSourceFreshnessFilter(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": filtered_models_schema_yml} + + def test_source_freshness_all_records(self, project): + # all records are filtered out + self.run_dbt_with_vars(project, ["source", "freshness"], expect_pass=False) + # we should insert a record with _id=101 that's fresh, but will still fail + # because the filter excludes it + self._set_updated_at_to(project, timedelta(hours=-2)) + self.run_dbt_with_vars(project, ["source", "freshness"], expect_pass=False) + + # we should now insert a record with _id=102 that's fresh, and the filter + # includes it + self._set_updated_at_to(project, timedelta(hours=-2)) + self.run_dbt_with_vars(project, ["source", "freshness"], expect_pass=True) + + +class TestOverrideSourceFreshness(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": override_freshness_models_schema_yml} + + @staticmethod + def get_result_from_unique_id(data, unique_id): + try: + return list(filter(lambda x: x["unique_id"] == unique_id, data["results"]))[0] + except IndexError: + raise f"No result for the given unique_id. unique_id={unique_id}" + + def test_override_source_freshness(self, project): + self._set_updated_at_to(project, timedelta(hours=-30)) + + path = "target/pass_source.json" + results = self.run_dbt_with_vars( + project, ["source", "freshness", "-o", path], expect_pass=False + ) + assert len(results) == 4 # freshness disabled for source_e + + assert os.path.exists(path) + with open(path) as fp: + data = json.load(fp) + + result_source_a = self.get_result_from_unique_id(data, "source.test.test_source.source_a") + assert result_source_a["status"] == "error" + + expected = { + "warn_after": {"count": 6, "period": "hour"}, + "error_after": {"count": 24, "period": "hour"}, + "filter": None, + } + assert result_source_a["criteria"] == expected + + result_source_b = self.get_result_from_unique_id(data, "source.test.test_source.source_b") + assert result_source_b["status"] == "error" + + expected = { + "warn_after": {"count": 6, "period": "hour"}, + "error_after": {"count": 24, "period": "hour"}, + "filter": None, + } + assert result_source_b["criteria"] == expected + + result_source_c = self.get_result_from_unique_id(data, "source.test.test_source.source_c") + assert result_source_c["status"] == "warn" + + expected = { + "warn_after": {"count": 6, "period": "hour"}, + "error_after": None, + "filter": None, + } + assert result_source_c["criteria"] == expected + + result_source_d = self.get_result_from_unique_id(data, "source.test.test_source.source_d") + assert result_source_d["status"] == "warn" + + expected = { + "warn_after": {"count": 6, "period": "hour"}, + "error_after": {"count": 72, "period": "hour"}, + "filter": None, + } + assert result_source_d["criteria"] == expected + + +class TestSourceFreshnessMacroOverride(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def macros(self): + return { + "collect_freshness.sql": collect_freshness_macro_override_previous_return_signature + } + + def test_source_freshness(self, project): + # ensure that the deprecation warning is raised + vars_dict = { + "test_run_schema": project.test_schema, + "test_loaded_at": project.adapter.quote("updated_at"), + } + events = [] + dbtRunner(callbacks=[events.append]).invoke( + ["source", "freshness", "--vars", yaml.safe_dump(vars_dict)] + ) + matches = list([e for e in events if e.info.name == "CollectFreshnessReturnSignature"]) + assert matches + + +class TestMetadataFreshnessFails: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": freshness_via_metadata_schema_yml} + + def test_metadata_freshness_fails(self, project): + """Since the default test adapter (postgres) does not support metadata + based source freshness checks, trying to use that mechanism should + result in a parse-time warning.""" + got_warning = False + + def warning_probe(e): + nonlocal got_warning + if e.info.name == "FreshnessConfigProblem" and e.info.level == "warn": + got_warning = True + + runner = dbtRunner(callbacks=[warning_probe]) + runner.invoke(["parse"]) + + assert got_warning diff --git a/tests/functional/statements/fixtures.py b/tests/functional/statements/fixtures.py new file mode 100644 index 000000000..6a0b0a4bf --- /dev/null +++ b/tests/functional/statements/fixtures.py @@ -0,0 +1,182 @@ +# +# Seeds +# +seeds__statement_expected = """source,value +matrix,100 +table,100 +""" + +seeds__statement_actual = """id,first_name,last_name,email,gender,ip_address +1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 +2,Kathryn,Walker,kwalker1@ezinearticles.com,Female,194.121.179.35 +3,Gerald,Ryan,gryan2@com.com,Male,11.3.212.243 +4,Bonnie,Spencer,bspencer3@ameblo.jp,Female,216.32.196.175 +5,Harold,Taylor,htaylor4@people.com.cn,Male,253.10.246.136 +6,Jacqueline,Griffin,jgriffin5@t.co,Female,16.13.192.220 +7,Wanda,Arnold,warnold6@google.nl,Female,232.116.150.64 +8,Craig,Ortiz,cortiz7@sciencedaily.com,Male,199.126.106.13 +9,Gary,Day,gday8@nih.gov,Male,35.81.68.186 +10,Rose,Wright,rwright9@yahoo.co.jp,Female,236.82.178.100 +11,Raymond,Kelley,rkelleya@fc2.com,Male,213.65.166.67 +12,Gerald,Robinson,grobinsonb@disqus.com,Male,72.232.194.193 +13,Mildred,Martinez,mmartinezc@samsung.com,Female,198.29.112.5 +14,Dennis,Arnold,darnoldd@google.com,Male,86.96.3.250 +15,Judy,Gray,jgraye@opensource.org,Female,79.218.162.245 +16,Theresa,Garza,tgarzaf@epa.gov,Female,21.59.100.54 +17,Gerald,Robertson,grobertsong@csmonitor.com,Male,131.134.82.96 +18,Philip,Hernandez,phernandezh@adobe.com,Male,254.196.137.72 +19,Julia,Gonzalez,jgonzalezi@cam.ac.uk,Female,84.240.227.174 +20,Andrew,Davis,adavisj@patch.com,Male,9.255.67.25 +21,Kimberly,Harper,kharperk@foxnews.com,Female,198.208.120.253 +22,Mark,Martin,mmartinl@marketwatch.com,Male,233.138.182.153 +23,Cynthia,Ruiz,cruizm@google.fr,Female,18.178.187.201 +24,Samuel,Carroll,scarrolln@youtu.be,Male,128.113.96.122 +25,Jennifer,Larson,jlarsono@vinaora.com,Female,98.234.85.95 +26,Ashley,Perry,aperryp@rakuten.co.jp,Female,247.173.114.52 +27,Howard,Rodriguez,hrodriguezq@shutterfly.com,Male,231.188.95.26 +28,Amy,Brooks,abrooksr@theatlantic.com,Female,141.199.174.118 +29,Louise,Warren,lwarrens@adobe.com,Female,96.105.158.28 +30,Tina,Watson,twatsont@myspace.com,Female,251.142.118.177 +31,Janice,Kelley,jkelleyu@creativecommons.org,Female,239.167.34.233 +32,Terry,Mccoy,tmccoyv@bravesites.com,Male,117.201.183.203 +33,Jeffrey,Morgan,jmorganw@surveymonkey.com,Male,78.101.78.149 +34,Louis,Harvey,lharveyx@sina.com.cn,Male,51.50.0.167 +35,Philip,Miller,pmillery@samsung.com,Male,103.255.222.110 +36,Willie,Marshall,wmarshallz@ow.ly,Male,149.219.91.68 +37,Patrick,Lopez,plopez10@redcross.org,Male,250.136.229.89 +38,Adam,Jenkins,ajenkins11@harvard.edu,Male,7.36.112.81 +39,Benjamin,Cruz,bcruz12@linkedin.com,Male,32.38.98.15 +40,Ruby,Hawkins,rhawkins13@gmpg.org,Female,135.171.129.255 +41,Carlos,Barnes,cbarnes14@a8.net,Male,240.197.85.140 +42,Ruby,Griffin,rgriffin15@bravesites.com,Female,19.29.135.24 +43,Sean,Mason,smason16@icq.com,Male,159.219.155.249 +44,Anthony,Payne,apayne17@utexas.edu,Male,235.168.199.218 +45,Steve,Cruz,scruz18@pcworld.com,Male,238.201.81.198 +46,Anthony,Garcia,agarcia19@flavors.me,Male,25.85.10.18 +47,Doris,Lopez,dlopez1a@sphinn.com,Female,245.218.51.238 +48,Susan,Nichols,snichols1b@freewebs.com,Female,199.99.9.61 +49,Wanda,Ferguson,wferguson1c@yahoo.co.jp,Female,236.241.135.21 +50,Andrea,Pierce,apierce1d@google.co.uk,Female,132.40.10.209 +51,Lawrence,Phillips,lphillips1e@jugem.jp,Male,72.226.82.87 +52,Judy,Gilbert,jgilbert1f@multiply.com,Female,196.250.15.142 +53,Eric,Williams,ewilliams1g@joomla.org,Male,222.202.73.126 +54,Ralph,Romero,rromero1h@sogou.com,Male,123.184.125.212 +55,Jean,Wilson,jwilson1i@ocn.ne.jp,Female,176.106.32.194 +56,Lori,Reynolds,lreynolds1j@illinois.edu,Female,114.181.203.22 +57,Donald,Moreno,dmoreno1k@bbc.co.uk,Male,233.249.97.60 +58,Steven,Berry,sberry1l@eepurl.com,Male,186.193.50.50 +59,Theresa,Shaw,tshaw1m@people.com.cn,Female,120.37.71.222 +60,John,Stephens,jstephens1n@nationalgeographic.com,Male,191.87.127.115 +61,Richard,Jacobs,rjacobs1o@state.tx.us,Male,66.210.83.155 +62,Andrew,Lawson,alawson1p@over-blog.com,Male,54.98.36.94 +63,Peter,Morgan,pmorgan1q@rambler.ru,Male,14.77.29.106 +64,Nicole,Garrett,ngarrett1r@zimbio.com,Female,21.127.74.68 +65,Joshua,Kim,jkim1s@edublogs.org,Male,57.255.207.41 +66,Ralph,Roberts,rroberts1t@people.com.cn,Male,222.143.131.109 +67,George,Montgomery,gmontgomery1u@smugmug.com,Male,76.75.111.77 +68,Gerald,Alvarez,galvarez1v@flavors.me,Male,58.157.186.194 +69,Donald,Olson,dolson1w@whitehouse.gov,Male,69.65.74.135 +70,Carlos,Morgan,cmorgan1x@pbs.org,Male,96.20.140.87 +71,Aaron,Stanley,astanley1y@webnode.com,Male,163.119.217.44 +72,Virginia,Long,vlong1z@spiegel.de,Female,204.150.194.182 +73,Robert,Berry,rberry20@tripadvisor.com,Male,104.19.48.241 +74,Antonio,Brooks,abrooks21@unesco.org,Male,210.31.7.24 +75,Ruby,Garcia,rgarcia22@ovh.net,Female,233.218.162.214 +76,Jack,Hanson,jhanson23@blogtalkradio.com,Male,31.55.46.199 +77,Kathryn,Nelson,knelson24@walmart.com,Female,14.189.146.41 +78,Jason,Reed,jreed25@printfriendly.com,Male,141.189.89.255 +79,George,Coleman,gcoleman26@people.com.cn,Male,81.189.221.144 +80,Rose,King,rking27@ucoz.com,Female,212.123.168.231 +81,Johnny,Holmes,jholmes28@boston.com,Male,177.3.93.188 +82,Katherine,Gilbert,kgilbert29@altervista.org,Female,199.215.169.61 +83,Joshua,Thomas,jthomas2a@ustream.tv,Male,0.8.205.30 +84,Julie,Perry,jperry2b@opensource.org,Female,60.116.114.192 +85,Richard,Perry,rperry2c@oracle.com,Male,181.125.70.232 +86,Kenneth,Ruiz,kruiz2d@wikimedia.org,Male,189.105.137.109 +87,Jose,Morgan,jmorgan2e@webnode.com,Male,101.134.215.156 +88,Donald,Campbell,dcampbell2f@goo.ne.jp,Male,102.120.215.84 +89,Debra,Collins,dcollins2g@uol.com.br,Female,90.13.153.235 +90,Jesse,Johnson,jjohnson2h@stumbleupon.com,Male,225.178.125.53 +91,Elizabeth,Stone,estone2i@histats.com,Female,123.184.126.221 +92,Angela,Rogers,arogers2j@goodreads.com,Female,98.104.132.187 +93,Emily,Dixon,edixon2k@mlb.com,Female,39.190.75.57 +94,Albert,Scott,ascott2l@tinypic.com,Male,40.209.13.189 +95,Barbara,Peterson,bpeterson2m@ow.ly,Female,75.249.136.180 +96,Adam,Greene,agreene2n@fastcompany.com,Male,184.173.109.144 +97,Earl,Sanders,esanders2o@hc360.com,Male,247.34.90.117 +98,Angela,Brooks,abrooks2p@mtv.com,Female,10.63.249.126 +99,Harold,Foster,hfoster2q@privacy.gov.au,Male,139.214.40.244 +100,Carl,Meyer,cmeyer2r@disqus.com,Male,204.117.7.88 +""" + +# +# Models +# +models__statement_actual = """ +-- {{ ref('seed') }} + +{%- call statement('test_statement', fetch_result=True) -%} + + select + count(*) as "num_records" + + from {{ ref('seed') }} + +{%- endcall -%} + +{% set result = load_result('test_statement') %} + +{% set res_table = result['table'] %} +{% set res_matrix = result['data'] %} + +{% set matrix_value = res_matrix[0][0] %} +{% set table_value = res_table[0]['num_records'] %} + +select 'matrix' as source, {{ matrix_value }} as value +union all +select 'table' as source, {{ table_value }} as value +""" + +models__statement_duplicated_load = """ +-- {{ ref('seed') }} + +{%- call statement('test_statement', fetch_result=True) -%} + + select + count(*) as "num_records" + + from {{ ref('seed') }} + +{%- endcall -%} + +{% set result = load_result('test_statement') %} +{% set result = load_result('test_statement') %} + +select 1 +""" + +models__statement_load_main_twice = """ +-- {{ ref('seed') }} + +{%- call statement('main', fetch_result=True) -%} + + select + count(*) as "num_records" + + from {{ ref('seed') }} + +{%- endcall -%} + +{% set result = load_result('main') %} +{% set result = load_result('main') %} + +{% set res_table = result['table'] %} +{% set res_matrix = result['data'] %} + +{% set matrix_value = res_matrix[0][0] %} +{% set table_value = res_table[0]['num_records'] %} + +select 'matrix' as source, {{ matrix_value }} as value +union all +select 'table' as source, {{ table_value }} as value +""" diff --git a/tests/functional/statements/test_statements.py b/tests/functional/statements/test_statements.py new file mode 100644 index 000000000..5e1ec9214 --- /dev/null +++ b/tests/functional/statements/test_statements.py @@ -0,0 +1,64 @@ +import pathlib + +from dbt.tests.util import check_relations_equal, run_dbt, write_file +import pytest + +from tests.functional.statements.fixtures import ( + models__statement_actual, + models__statement_duplicated_load, + models__statement_load_main_twice, + seeds__statement_actual, + seeds__statement_expected, +) + + +class TestStatements: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + # put seeds in 'seed' not 'seeds' directory + (pathlib.Path(project.project_root) / "seed").mkdir(parents=True, exist_ok=True) + write_file(seeds__statement_actual, project.project_root, "seed", "seed.csv") + write_file( + seeds__statement_expected, project.project_root, "seed", "statement_expected.csv" + ) + + @pytest.fixture(scope="class") + def models(self): + return { + "statement_actual.sql": models__statement_actual, + "statement_duplicated_load.sql": models__statement_duplicated_load, + "statement_load_main_twice.sql": models__statement_load_main_twice, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + "seed-paths": ["seed"], + } + + def test_postgres_statements(self, project): + results = run_dbt(["seed"]) + assert len(results) == 2 + results = run_dbt(["run", "-m", "statement_actual"]) + assert len(results) == 1 + + check_relations_equal(project.adapter, ["statement_actual", "statement_expected"]) + + def test_duplicated_load_statements(self, project): + run_dbt(["seed"]) + results = run_dbt(["run", "-m", "statement_duplicated_load"], False) + assert len(results) == 1 + assert results.results[0].status == "error" + assert ( + "The 'statement' result named 'test_statement' has already been loaded into a variable" + in results.results[0].message + ) + + def test_load_statement_on_main_twice(self, project): + run_dbt(["seed"]) + results = run_dbt(["run", "-m", "statement_load_main_twice"]) + assert len(results) == 1 + check_relations_equal(project.adapter, ["statement_load_main_twice", "statement_expected"]) diff --git a/tests/functional/test_access.py b/tests/functional/test_access.py new file mode 100644 index 000000000..c53ca28ef --- /dev/null +++ b/tests/functional/test_access.py @@ -0,0 +1,479 @@ +from dbt.exceptions import DbtReferenceError, InvalidAccessTypeError +from dbt.node_types import AccessType +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file +import pytest + + +my_model_sql = "select 1 as fun" + +another_model_sql = "select 1234 as notfun" + +yet_another_model_sql = "select 999 as weird" + +schema_yml = """ +models: + - name: my_model + description: "my model" + access: public + - name: another_model + description: "yet another model" +""" + +ephemeral_schema_yml = """ +models: + - name: my_model + description: "my model" + access: public + config: + materialized: ephemeral + - name: another_model + description: "yet another model" +""" + +v2_schema_yml = """ +models: + - name: my_model + description: "my model" + access: public + - name: another_model + description: "another model" + - name: yet_another_model + description: "yet another model" + access: unsupported +""" + + +ref_my_model_sql = """ + select fun from {{ ref('my_model') }} +""" + + +groups_yml = """ +groups: + - name: analytics + owner: + name: analytics_owner + - name: marts + owner: + name: marts_owner +""" + + +v3_schema_yml = """ +models: + - name: my_model + description: "my model" + access: private + group: analytics + - name: another_model + description: "yet another model" + - name: ref_my_model + description: "a model that refs my_model" + group: analytics +""" + +v4_schema_yml = """ +models: + - name: my_model + description: "my model" + access: private + group: analytics + - name: another_model + description: "yet another model" + - name: ref_my_model + description: "a model that refs my_model" + group: marts +""" + +simple_exposure_yml = """ +exposures: + - name: simple_exposure + label: simple exposure label + type: dashboard + depends_on: + - ref('my_model') + owner: + email: something@example.com +""" + +v5_schema_yml = """ +models: + - name: my_model + description: "my model" + access: private + group: analytics + - name: another_model + description: "yet another model" + - name: ref_my_model + description: "a model that refs my_model" + group: analytics + - name: people_model + description: "some people" + access: public + group: analytics +""" + +v6_schema_yml = """ +models: + - name: my_model + description: "my model" + config: + access: private + group: analytics + - name: another_model + description: "yet another model" + - name: ref_my_model + description: "a model that refs my_model" + config: + group: analytics + - name: people_model + description: "some people" + config: + access: public + group: analytics +""" + +people_model_sql = """ +select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at +union all +select 1 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at +union all +select 1 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at +""" + +people_semantic_model_yml = """ +semantic_models: + - name: semantic_people + model: ref('people_model') + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + + +people_metric_yml = """ +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: "people" + meta: + my_meta: 'testing' + config: + group: analytics +""" + + +v2_people_metric_yml = """ +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: "people" + meta: + my_meta: 'testing' + config: + group: marts +""" + + +dbt_integration_project__dbt_project_yml_restrited_access = """ +name: dbt_integration_project +version: '1.0' +config-version: 2 + +model-paths: ["models"] # paths to models +analysis-paths: ["analyses"] # path with analysis files which are compiled, but not run +target-path: "target" # path for compiled code +clean-targets: ["target"] # directories removed by the clean task +test-paths: ["tests"] # where to store test results +seed-paths: ["seeds"] # load CSVs from this directory with `dbt seed` +macro-paths: ["macros"] # where to find macros + +profile: user + +models: + dbt_integration_project: + +restrict-access: True +""" + + +dbt_integration_project__schema_yml_protected_model = """ +version: 2 +models: +- name: table_model + access: protected +""" + + +dbt_integration_project__schema_yml_private_model = """ +version: 2 +models: +- name: table_model + access: private + group: package +""" + + +ref_package_model_sql = """ + select * from {{ ref('dbt_integration_project', 'table_model') }} +""" + + +schema_yml_ref_package_model = """ +version: 2 +models: +- name: ref_package_model + group: package +""" + + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" + + +class TestAccess: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "another_model.sql": yet_another_model_sql, + "schema.yml": schema_yml, + } + + def test_access_attribute(self, project): + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 2 + + my_model_id = "model.test.my_model" + another_model_id = "model.test.another_model" + assert my_model_id in manifest.nodes + assert another_model_id in manifest.nodes + + assert manifest.nodes[my_model_id].access == AccessType.Public + assert manifest.nodes[another_model_id].access == AccessType.Protected + + # write a file with invalid materialization for public access value + write_file(ephemeral_schema_yml, project.project_root, "models", "schema.yml") + with pytest.raises(InvalidAccessTypeError): + run_dbt(["parse"]) + + # write a file with an invalid access value + write_file(yet_another_model_sql, project.project_root, "models", "yet_another_model.sql") + write_file(v2_schema_yml, project.project_root, "models", "schema.yml") + + with pytest.raises(InvalidAccessTypeError): + run_dbt(["parse"]) + + write_file(v2_schema_yml, project.project_root, "models", "schema.yml") + with pytest.raises(InvalidAccessTypeError): + run_dbt(["parse"]) + + # Remove invalid access files and write out model that refs my_model + rm_file(project.project_root, "models", "yet_another_model.sql") + write_file(schema_yml, project.project_root, "models", "schema.yml") + write_file(ref_my_model_sql, project.project_root, "models", "ref_my_model.sql") + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 3 + + # make my_model private, set same group on my_model and ref_my_model + write_file(groups_yml, project.project_root, "models", "groups.yml") + write_file(v3_schema_yml, project.project_root, "models", "schema.yml") + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 3 + manifest = get_manifest(project.project_root) + ref_my_model_id = "model.test.ref_my_model" + assert manifest.nodes[my_model_id].group == "analytics" + assert manifest.nodes[ref_my_model_id].group == "analytics" + + # Change group on ref_my_model and it should raise + write_file(v4_schema_yml, project.project_root, "models", "schema.yml") + with pytest.raises(DbtReferenceError): + run_dbt(["parse"]) + + # put back group on ref_my_model, add exposure with ref to private model + write_file(v3_schema_yml, project.project_root, "models", "schema.yml") + # verify it works again + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 3 + # Write out exposure refing private my_model + write_file(simple_exposure_yml, project.project_root, "models", "simple_exposure.yml") + # Fails with reference error + with pytest.raises(DbtReferenceError): + run_dbt(["parse"]) + + # Remove exposure and add people model and metric file + write_file(v5_schema_yml, project.project_root, "models", "schema.yml") + rm_file(project.project_root, "models", "simple_exposure.yml") + write_file(people_model_sql, "models", "people_model.sql") + write_file(people_semantic_model_yml, "models", "people_semantic_model.yml") + write_file(people_metric_yml, "models", "people_metric.yml") + write_file(metricflow_time_spine_sql, "models", "metricflow_time_spine.sql") + # Should succeed + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 5 + metric_id = "metric.test.number_of_people" + assert manifest.metrics[metric_id].group == "analytics" + + # Use access and group in config + write_file(v5_schema_yml, project.project_root, "models", "schema.yml") + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 5 + assert manifest.nodes["model.test.my_model"].access == AccessType.Private + assert manifest.nodes["model.test.my_model"].group == "analytics" + assert manifest.nodes["model.test.ref_my_model"].access == AccessType.Protected + assert manifest.nodes["model.test.ref_my_model"].group == "analytics" + assert manifest.nodes["model.test.people_model"].access == AccessType.Public + assert manifest.nodes["model.test.people_model"].group == "analytics" + assert manifest.nodes["model.test.another_model"].access == AccessType.Protected + assert manifest.nodes["model.test.another_model"].group is None + + +class TestUnrestrictedPackageAccess: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, dbt_integration_project): # noqa: F811 + write_project_files(project_root, "dbt_integration_project", dbt_integration_project) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "dbt_integration_project"}]} + + @pytest.fixture(scope="class") + def models(self): + return {"ref_protected_package_model.sql": ref_package_model_sql} + + def test_unrestricted_protected_ref(self, project): + write_file( + dbt_integration_project__schema_yml_protected_model, + project.project_root, + "dbt_integration_project", + "models", + "schema.yml", + ) + run_dbt(["deps"]) + + # Runs without issue because restrict-access defaults to False + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 4 + root_project_model = manifest.nodes["model.test.ref_protected_package_model"] + assert root_project_model.depends_on_nodes == ["model.dbt_integration_project.table_model"] + + +class TestRestrictedPackageAccess: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, dbt_integration_project): # noqa: F811 + write_project_files(project_root, "dbt_integration_project", dbt_integration_project) + # Set table_model.access to protected + write_file( + dbt_integration_project__schema_yml_protected_model, + project_root, + "dbt_integration_project", + "models", + "schema.yml", + ) + # Set dbt_integration_project.restrict-access to True + write_file( + dbt_integration_project__dbt_project_yml_restrited_access, + project_root, + "dbt_integration_project", + "dbt_project.yml", + ) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "dbt_integration_project"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "ref_package_model.sql": ref_package_model_sql, + "schema.yml": schema_yml_ref_package_model, + } + + def test_restricted_protected_ref(self, project): + run_dbt(["deps"]) + with pytest.raises(DbtReferenceError): + run_dbt(["parse"]) + + def test_restricted_private_ref(self, project): + run_dbt(["deps"]) + + # Set table_model.access to private + write_file( + dbt_integration_project__schema_yml_private_model, + project.project_root, + "dbt_integration_project", + "models", + "schema.yml", + ) + + with pytest.raises(DbtReferenceError): + run_dbt(["parse"]) + + +dbt_project_yml = """ +models: + test: + subdir_one: + +group: analytics + +access: private + subdir_two: + +group: marts + +access: public +""" + + +class TestAccessDbtProjectConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": my_model_sql, + "subdir_one": { + "model_two.sql": my_model_sql, + }, + "subdir_two": { + "model_three.sql": my_model_sql, + }, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return dbt_project_yml + + def test_dbt_project_access_config(self, project): + write_file(groups_yml, project.project_root, "models", "groups.yml") + manifest = run_dbt(["parse"]) + model_one = manifest.nodes["model.test.model_one"] + model_two = manifest.nodes["model.test.model_two"] + model_three = manifest.nodes["model.test.model_three"] + assert model_one.group is None + assert model_one.access == AccessType.Protected + assert model_two.group == "analytics" + assert model_two.access == AccessType.Private + assert model_three.group == "marts" + assert model_three.access == AccessType.Public diff --git a/tests/functional/test_analyses.py b/tests/functional/test_analyses.py new file mode 100644 index 000000000..aa71d1c69 --- /dev/null +++ b/tests/functional/test_analyses.py @@ -0,0 +1,72 @@ +import os + +from dbt.tests.util import get_manifest, run_dbt +import pytest + + +my_model_sql = """ +select 1 as id +""" + +raw_stuff_sql = """ +{% raw %} +{% invalid jinja stuff %} +{% endraw %} +""" + +schema_yml = """ +version: 2 + +analyses: + - name: my_analysis + description: "This is my analysis" +""" + +my_analysis_sql = """ +select * from {{ ref('my_model') }} +""" + + +class TestAnalyses: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": my_model_sql} + + @pytest.fixture(scope="class") + def analyses(self): + return { + "raw_stuff.sql": raw_stuff_sql, + "schema.yml": schema_yml, + "my_analysis.sql": my_analysis_sql, + } + + def assert_contents_equal(self, path, expected): + with open(path) as fp: + assert fp.read().strip() == expected + + def test_postgres_analyses(self, project): + compiled_analysis_path = os.path.normpath("target/compiled/test/analyses") + path_1 = os.path.join(compiled_analysis_path, "my_analysis.sql") + path_2 = os.path.join(compiled_analysis_path, "raw_stuff.sql") + + run_dbt(["clean"]) + assert not (os.path.exists(compiled_analysis_path)) + + results = run_dbt(["compile"]) + assert len(results) == 3 + + manifest = get_manifest(project.project_root) + analysis_id = "analysis.test.my_analysis" + assert analysis_id in manifest.nodes + + node = manifest.nodes[analysis_id] + assert node.description == "This is my analysis" + + assert os.path.exists(path_1) + assert os.path.exists(path_2) + + expected_sql = 'select * from "{}"."{}"."my_model"'.format( + project.database, project.test_schema + ) + self.assert_contents_equal(path_1, expected_sql) + self.assert_contents_equal(path_2, "{% invalid jinja stuff %}") diff --git a/tests/functional/test_catalog.py b/tests/functional/test_catalog.py new file mode 100644 index 000000000..97f64f6b1 --- /dev/null +++ b/tests/functional/test_catalog.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.catalog.relation_types import CatalogRelationTypes + + +class TestCatalogRelationTypes(CatalogRelationTypes): + pass diff --git a/tests/functional/test_clean.py b/tests/functional/test_clean.py new file mode 100644 index 000000000..51cdbcbd2 --- /dev/null +++ b/tests/functional/test_clean.py @@ -0,0 +1,55 @@ +from dbt.tests.util import run_dbt +from dbt_common.exceptions import DbtRuntimeError +import pytest + + +class TestCleanSourcePath: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['models']" + + def test_clean_source_path(self, project): + with pytest.raises(DbtRuntimeError, match="dbt will not clean the following source paths"): + run_dbt(["clean"]) + + +class TestCleanPathOutsideProjectRelative: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['..']" + + def test_clean_path_outside_project(self, project): + with pytest.raises( + DbtRuntimeError, + match="dbt will not clean the following directories outside the project", + ): + run_dbt(["clean"]) + + +class TestCleanPathOutsideProjectAbsolute: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['/']" + + def test_clean_path_outside_project(self, project): + with pytest.raises( + DbtRuntimeError, + match="dbt will not clean the following directories outside the project", + ): + run_dbt(["clean"]) + + +class TestCleanPathOutsideProjectWithFlag: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['/tmp/foo']" + + def test_clean_path_outside_project(self, project): + # Doesn't fail because flag is set + run_dbt(["clean", "--no-clean-project-files-only"]) + + with pytest.raises( + DbtRuntimeError, + match="dbt will not clean the following directories outside the project", + ): + run_dbt(["clean", "--clean-project-files-only"]) diff --git a/tests/functional/test_colors.py b/tests/functional/test_colors.py new file mode 100644 index 000000000..3d6451462 --- /dev/null +++ b/tests/functional/test_colors.py @@ -0,0 +1,44 @@ +import re + +from dbt.tests.util import run_dbt_and_capture +import pytest + + +models__do_nothing_then_fail_sql = """ +select 1, + +""" + + +@pytest.fixture(scope="class") +def models(): + return {"do_nothing_then_fail.sql": models__do_nothing_then_fail_sql} + + +@pytest.fixture(scope="class") +def project_config_update(): + return {"config-version": 2} + + +class TestColors: + def test_use_colors(self, project): + self.assert_colors_used( + "--use-colors", + expect_colors=True, + ) + + def test_no_use_colors(self, project): + self.assert_colors_used( + "--no-use-colors", + expect_colors=False, + ) + + def assert_colors_used(self, flag, expect_colors): + _, stdout = run_dbt_and_capture(args=[flag, "run"], expect_pass=False) + # pattern to match formatted log output + pattern = re.compile(r"\[31m.*|\[33m.*") + stdout_contains_formatting_characters = bool(pattern.search(stdout)) + if expect_colors: + assert stdout_contains_formatting_characters + else: + assert not stdout_contains_formatting_characters diff --git a/tests/functional/test_column_quotes.py b/tests/functional/test_column_quotes.py new file mode 100644 index 000000000..f19979dad --- /dev/null +++ b/tests/functional/test_column_quotes.py @@ -0,0 +1,100 @@ +from dbt.tests.util import run_dbt +import pytest + + +_MODELS__COLUMN_QUOTING_DEFAULT = """ +{% set col_a = '"col_A"' %} +{% set col_b = '"col_B"' %} + +{{ + config( + materialized = 'incremental', + unique_key = col_a, + ) +}} + +select + {{ col_a }}, + {{ col_b }} +from {{ref('seed')}} +""" + +_MODELS__COLUMN_QUOTING_NO_QUOTING = """ +{% set col_a = '"col_a"' %} +{% set col_b = '"col_b"' %} + +{{ + config( + materialized = 'incremental', + unique_key = col_a, + ) +}} + +select + {{ col_a }}, + {{ col_b }} +from {{ref('seed')}} +""" + +_SEEDS_BASIC_SEED = """col_A,col_B +1,2 +3,4 +5,6 +""" + + +class BaseColumnQuotingTest: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": _MODELS__COLUMN_QUOTING_DEFAULT} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": _SEEDS_BASIC_SEED} + + @pytest.fixture(scope="function") + def run_column_quotes(self, project): + def fixt(): + results = run_dbt(["seed"]) + assert len(results) == 1 + results = run_dbt(["run"]) + assert len(results) == 1 + results = run_dbt(["run"]) + assert len(results) == 1 + + return fixt + + +class TestColumnQuotingDefault(BaseColumnQuotingTest): + def test_column_quotes(self, run_column_quotes): + run_column_quotes() + + +class TestColumnQuotingEnabled(BaseColumnQuotingTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": True, + }, + } + + def test_column_quotes(self, run_column_quotes): + run_column_quotes() + + +class TestColumnQuotingDisabled(BaseColumnQuotingTest): + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": _MODELS__COLUMN_QUOTING_NO_QUOTING} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + } + + def test_column_quotes(self, run_column_quotes): + run_column_quotes() diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py new file mode 100644 index 000000000..100cdbb66 --- /dev/null +++ b/tests/functional/test_config.py @@ -0,0 +1,402 @@ +from argparse import Namespace +from contextlib import contextmanager +import os +import shutil +import tempfile +from unittest import TestCase, mock + +import dbt.config +import dbt.exceptions +import dbt.tracking +import yaml + +from dbt.adapters.postgres import PostgresCredentials +from .utils import normalize + + +INITIAL_ROOT = os.getcwd() + + +@contextmanager +def temp_cd(path): + current_path = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(current_path) + + +@contextmanager +def raises_nothing(): + yield + + +def empty_profile_renderer(): + return dbt.config.renderer.ProfileRenderer({}) + + +def empty_project_renderer(): + return dbt.config.renderer.DbtProjectYamlRenderer() + + +model_config = { + "my_package_name": { + "enabled": True, + "adwords": { + "adwords_ads": {"materialized": "table", "enabled": True, "schema": "analytics"} + }, + "snowplow": { + "snowplow_sessions": { + "sort": "timestamp", + "materialized": "incremental", + "dist": "user_id", + "unique_key": "id", + }, + "base": { + "snowplow_events": { + "sort": ["timestamp", "userid"], + "materialized": "table", + "sort_type": "interleaved", + "dist": "userid", + } + }, + }, + } +} + +model_fqns = frozenset( + ( + ("my_package_name", "snowplow", "snowplow_sessions"), + ("my_package_name", "snowplow", "base", "snowplow_events"), + ("my_package_name", "adwords", "adwords_ads"), + ) +) + + +class Args: + def __init__( + self, + profiles_dir=None, + threads=None, + profile=None, + cli_vars=None, + version_check=None, + project_dir=None, + target=None, + ): + self.profile = profile + self.threads = threads + self.target = target + if profiles_dir is not None: + self.profiles_dir = profiles_dir + if cli_vars is not None: + self.vars = cli_vars + if version_check is not None: + self.version_check = version_check + if project_dir is not None: + self.project_dir = project_dir + + +class BaseConfigTest(TestCase): + """Subclass this, and before calling the superclass setUp, set + self.profiles_dir and self.project_dir. + """ + + def setUp(self): + # Write project + self.project_dir = normalize(tempfile.mkdtemp()) + self.default_project_data = { + "version": "0.0.1", + "name": "my_test_project", + "profile": "default", + } + self.write_project(self.default_project_data) + + # Write profile + self.profiles_dir = normalize(tempfile.mkdtemp()) + self.default_profile_data = { + "default": { + "outputs": { + "postgres": { + "type": "postgres", + "host": "postgres-db-hostname", + "port": 5555, + "user": "db_user", + "pass": "db_pass", + "dbname": "postgres-db-name", + "schema": "postgres-schema", + "threads": 7, + }, + "with-vars": { + "type": "{{ env_var('env_value_type') }}", + "host": "{{ env_var('env_value_host') }}", + "port": "{{ env_var('env_value_port') | as_number }}", + "user": "{{ env_var('env_value_user') }}", + "pass": "{{ env_var('env_value_pass') }}", + "dbname": "{{ env_var('env_value_dbname') }}", + "schema": "{{ env_var('env_value_schema') }}", + }, + "cli-and-env-vars": { + "type": "{{ env_var('env_value_type') }}", + "host": "{{ var('cli_value_host') }}", + "port": "{{ env_var('env_value_port') | as_number }}", + "user": "{{ env_var('env_value_user') }}", + "pass": "{{ env_var('env_value_pass') }}", + "dbname": "{{ env_var('env_value_dbname') }}", + "schema": "{{ env_var('env_value_schema') }}", + }, + }, + "target": "postgres", + }, + "other": { + "outputs": { + "other-postgres": { + "type": "postgres", + "host": "other-postgres-db-hostname", + "port": 4444, + "user": "other_db_user", + "pass": "other_db_pass", + "dbname": "other-postgres-db-name", + "schema": "other-postgres-schema", + "threads": 2, + } + }, + "target": "other-postgres", + }, + "empty_profile_data": {}, + } + self.write_profile(self.default_profile_data) + + self.args = Namespace( + profiles_dir=self.profiles_dir, + cli_vars={}, + version_check=True, + project_dir=self.project_dir, + target=None, + threads=None, + profile=None, + ) + self.env_override = { + "env_value_type": "postgres", + "env_value_host": "env-postgres-host", + "env_value_port": "6543", + "env_value_user": "env-postgres-user", + "env_value_pass": "env-postgres-pass", + "env_value_dbname": "env-postgres-dbname", + "env_value_schema": "env-postgres-schema", + "env_value_profile": "default", + } + + def assertRaisesOrReturns(self, exc): + if exc is None: + return raises_nothing() + else: + return self.assertRaises(exc) + + def tearDown(self): + try: + shutil.rmtree(self.project_dir) + except EnvironmentError: + pass + try: + shutil.rmtree(self.profiles_dir) + except EnvironmentError: + pass + + def project_path(self, name): + return os.path.join(self.project_dir, name) + + def profile_path(self, name): + return os.path.join(self.profiles_dir, name) + + def write_project(self, project_data=None): + if project_data is None: + project_data = self.project_data + with open(self.project_path("dbt_project.yml"), "w") as fp: + yaml.dump(project_data, fp) + + def write_packages(self, package_data): + with open(self.project_path("packages.yml"), "w") as fp: + yaml.dump(package_data, fp) + + def write_profile(self, profile_data=None): + if profile_data is None: + profile_data = self.profile_data + with open(self.profile_path("profiles.yml"), "w") as fp: + yaml.dump(profile_data, fp) + + def write_empty_profile(self): + with open(self.profile_path("profiles.yml"), "w") as fp: + yaml.dump("", fp) + + +class TestProfile(BaseConfigTest): + def from_raw_profiles(self): + renderer = empty_profile_renderer() + return dbt.config.Profile.from_raw_profiles(self.default_profile_data, "default", renderer) + + def test_from_raw_profiles(self): + profile = self.from_raw_profiles() + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "postgres") + self.assertEqual(profile.threads, 7) + self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "postgres-db-hostname") + self.assertEqual(profile.credentials.port, 5555) + self.assertEqual(profile.credentials.user, "db_user") + self.assertEqual(profile.credentials.password, "db_pass") + self.assertEqual(profile.credentials.schema, "postgres-schema") + self.assertEqual(profile.credentials.database, "postgres-db-name") + + def test_missing_type(self): + del self.default_profile_data["default"]["outputs"]["postgres"]["type"] + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_raw_profiles() + self.assertIn("type", str(exc.exception)) + self.assertIn("postgres", str(exc.exception)) + self.assertIn("default", str(exc.exception)) + + def test_bad_type(self): + self.default_profile_data["default"]["outputs"]["postgres"]["type"] = "invalid" + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_raw_profiles() + self.assertIn("Credentials", str(exc.exception)) + self.assertIn("postgres", str(exc.exception)) + self.assertIn("default", str(exc.exception)) + + def test_invalid_credentials(self): + del self.default_profile_data["default"]["outputs"]["postgres"]["host"] + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_raw_profiles() + self.assertIn("Credentials", str(exc.exception)) + self.assertIn("postgres", str(exc.exception)) + self.assertIn("default", str(exc.exception)) + + def test_missing_target(self): + profile = self.default_profile_data["default"] + del profile["target"] + profile["outputs"]["default"] = profile["outputs"]["postgres"] + profile = self.from_raw_profiles() + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "default") + self.assertEqual(profile.credentials.type, "postgres") + + +class TestProfileFile(BaseConfigTest): + def from_raw_profile_info(self, raw_profile=None, profile_name="default", **kwargs): + if raw_profile is None: + raw_profile = self.default_profile_data["default"] + renderer = empty_profile_renderer() + kw = { + "raw_profile": raw_profile, + "profile_name": profile_name, + "renderer": renderer, + } + kw.update(kwargs) + return dbt.config.Profile.from_raw_profile_info(**kw) + + def from_args(self, project_profile_name="default", **kwargs): + kw = { + "project_profile_name": project_profile_name, + "renderer": empty_profile_renderer(), + "threads_override": self.args.threads, + "target_override": self.args.target, + "profile_name_override": self.args.profile, + } + kw.update(kwargs) + return dbt.config.Profile.render(**kw) + + def test_profile_simple(self): + profile = self.from_args() + from_raw = self.from_raw_profile_info() + + self.assertEqual(profile.target_name, "postgres") + self.assertEqual(profile.threads, 3) + self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "postgres-db-hostname") + self.assertEqual(profile.credentials.port, 5555) + self.assertEqual(profile.credentials.user, "db_user") + self.assertEqual(profile.credentials.password, "db_pass") + self.assertEqual(profile.credentials.schema, "postgres-schema") + self.assertEqual(profile.credentials.database, "postgres-db-name") + self.assertEqual(profile, from_raw) + + def test_profile_override(self): + self.args.profile = "other" + self.args.threads = 3 + profile = self.from_args() + from_raw = self.from_raw_profile_info( + self.default_profile_data["other"], + "other", + threads_override=3, + ) + + self.assertEqual(profile.target_name, "other-postgres") + self.assertEqual(profile.threads, 3) + self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "other-postgres-db-hostname") + self.assertEqual(profile.credentials.port, 4444) + self.assertEqual(profile.credentials.user, "other_db_user") + self.assertEqual(profile.credentials.password, "other_db_pass") + self.assertEqual(profile.credentials.schema, "other-postgres-schema") + self.assertEqual(profile.credentials.database, "other-postgres-db-name") + self.assertEqual(profile, from_raw) + + def test_env_vars(self): + self.args.target = "with-vars" + with mock.patch.dict(os.environ, self.env_override): + profile = self.from_args() + from_raw = self.from_raw_profile_info(target_override="with-vars") + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "with-vars") + self.assertEqual(profile.threads, 1) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "env-postgres-host") + self.assertEqual(profile.credentials.port, 6543) + self.assertEqual(profile.credentials.user, "env-postgres-user") + self.assertEqual(profile.credentials.password, "env-postgres-pass") + self.assertEqual(profile, from_raw) + + def test_env_vars_env_target(self): + self.default_profile_data["default"]["target"] = "{{ env_var('env_value_target') }}" + self.write_profile(self.default_profile_data) + self.env_override["env_value_target"] = "with-vars" + with mock.patch.dict(os.environ, self.env_override): + profile = self.from_args() + from_raw = self.from_raw_profile_info(target_override="with-vars") + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "with-vars") + self.assertEqual(profile.threads, 1) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "env-postgres-host") + self.assertEqual(profile.credentials.port, 6543) + self.assertEqual(profile.credentials.user, "env-postgres-user") + self.assertEqual(profile.credentials.password, "env-postgres-pass") + self.assertEqual(profile, from_raw) + + def test_cli_and_env_vars(self): + self.args.target = "cli-and-env-vars" + self.args.vars = {"cli_value_host": "cli-postgres-host"} + renderer = dbt.config.renderer.ProfileRenderer({"cli_value_host": "cli-postgres-host"}) + with mock.patch.dict(os.environ, self.env_override): + profile = self.from_args(renderer=renderer) + from_raw = self.from_raw_profile_info( + target_override="cli-and-env-vars", + renderer=renderer, + ) + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "cli-and-env-vars") + self.assertEqual(profile.threads, 1) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "cli-postgres-host") + self.assertEqual(profile.credentials.port, 6543) + self.assertEqual(profile.credentials.user, "env-postgres-user") + self.assertEqual(profile.credentials.password, "env-postgres-pass") + self.assertEqual(profile, from_raw) diff --git a/tests/functional/test_connection_manager.py b/tests/functional/test_connection_manager.py new file mode 100644 index 000000000..429c0cd29 --- /dev/null +++ b/tests/functional/test_connection_manager.py @@ -0,0 +1,77 @@ +from unittest import TestCase, mock + +from dbt.adapters.contracts.connection import Connection +from dbt.tests.adapter.connection_manager import ConnectionManagerRetry +import psycopg2 + +from dbt.adapters.postgres import PostgresCredentials, PostgresConnectionManager + + +class TestConnectionManagerRetry(ConnectionManagerRetry): + def get_connection(self) -> Connection: + if connection := self.connection: + pass + else: + credentials = PostgresCredentials( + host="localhost", + user="test-user", + port=1111, + password="test-password", + database="test-db", + schema="test-schema", + ) + connection = Connection("postgres", None, credentials) + return connection + + +class TestConnectionManagerOpen(TestCase): + # Postgres-specific + def setUp(self): + self.connection = self.get_connection() + + def get_connection(self) -> Connection: + if connection := self.connection: + pass + else: + credentials = PostgresCredentials( + host="localhost", + user="test-user", + port=1111, + password="test-password", + database="test-db", + schema="test-schema", + retries=2, + ) + connection = Connection("postgres", None, credentials) + return connection + + def test_open(self): + """Test opening a Postgres Connection with failures in the first 3 attempts. + + This test uses a Connection populated with test PostgresCredentials values, and + expects a mock connect to raise a psycopg2.errors.ConnectionFailuer + in the first 3 invocations, after which the mock should return True. As a result: + * The Connection state should be "open" and the handle True, as connect + returns in the 4th attempt. + * The resulting attempt count should be 4. + """ + conn = self.connection + attempt = 0 + + def connect(*args, **kwargs): + nonlocal attempt + attempt += 1 + + if attempt <= 2: + raise psycopg2.errors.ConnectionFailure("Connection has failed") + + return True + + with mock.patch("psycopg2.connect", wraps=connect) as mock_connect: + PostgresConnectionManager.open(conn) + + assert mock_connect.call_count == 3 + + assert attempt == 3 + assert conn.state == "open" + assert conn.handle is True diff --git a/tests/functional/test_custom_target_path.py b/tests/functional/test_custom_target_path.py new file mode 100644 index 000000000..cec2ed2ff --- /dev/null +++ b/tests/functional/test_custom_target_path.py @@ -0,0 +1,34 @@ +from pathlib import Path + +from dbt.tests.util import run_dbt +import pytest + + +class TestTargetPathConfig: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"config-version": 2, "target-path": "project_target"} + + def test_target_path(self, project): + run_dbt(["run"]) + assert Path("project_target").is_dir() + assert not Path("target").is_dir() + + +class TestTargetPathEnvVar: + def test_target_path(self, project, monkeypatch): + monkeypatch.setenv("DBT_TARGET_PATH", "env_target") + run_dbt(["run"]) + assert Path("env_target").is_dir() + assert not Path("project_target").is_dir() + assert not Path("target").is_dir() + + +class TestTargetPathCliArg: + def test_target_path(self, project, monkeypatch): + monkeypatch.setenv("DBT_TARGET_PATH", "env_target") + run_dbt(["run", "--target-path", "cli_target"]) + assert Path("cli_target").is_dir() + assert not Path("env_target").is_dir() + assert not Path("project_target").is_dir() + assert not Path("target").is_dir() diff --git a/tests/functional/test_cycles.py b/tests/functional/test_cycles.py new file mode 100644 index 000000000..984b07bfe --- /dev/null +++ b/tests/functional/test_cycles.py @@ -0,0 +1,68 @@ +from dbt.tests.util import run_dbt +import pytest + + +model_a_sql = """ +select * from {{ ref('model_b') }} +""" + +model_b_sql = """ +select * from {{ ref('model_a') }} +""" + +complex_cycle__model_a_sql = """ +select 1 as id +""" + +complex_cycle__model_b_sql = """ +select * from {{ ref('model_a') }}s +union all +select * from {{ ref('model_e') }} +""" + +complex_cycle__model_c_sql = """ +select * from {{ ref('model_b') }} +""" + +complex_cycle__model_d_sql = """ +select * from {{ ref('model_c') }} +""" + +complex_cycle__model_e_sql = """ +select * from {{ ref('model_e') }} +""" + + +class TestSimpleCycle: + @pytest.fixture(scope="class") + def models(self): + return {"model_a.sql": model_a_sql, "model_b.sql": model_b_sql} + + def test_simple_cycle(self, project): + with pytest.raises(RuntimeError) as exc: + run_dbt(["run"]) + expected_msg = "Found a cycle" + assert expected_msg in str(exc.value) + + +class TestComplexCycle: + @pytest.fixture(scope="class") + def models(self): + # The cycle in this graph looks like: + # A -> B -> C -> D + # ^ | + # | | + # +--- E <--+ + return { + "model_a.sql": complex_cycle__model_a_sql, + "model_b.sql": complex_cycle__model_b_sql, + "model_c.sql": complex_cycle__model_c_sql, + "model_d.sql": complex_cycle__model_d_sql, + "model_e.sql": complex_cycle__model_e_sql, + } + + def test_complex_cycle(self, project): + with pytest.raises(RuntimeError) as exc: + run_dbt(["run"]) + expected_msg = "Found a cycle" + assert expected_msg in str(exc.value) diff --git a/tests/functional/test_dbt_runner.py b/tests/functional/test_dbt_runner.py new file mode 100644 index 000000000..c1e05f0fa --- /dev/null +++ b/tests/functional/test_dbt_runner.py @@ -0,0 +1,73 @@ +from unittest import mock + +from dbt.cli.exceptions import DbtUsageException +from dbt.cli.main import dbtRunner +from dbt.exceptions import DbtProjectError +import pytest + + +class TestDbtRunner: + @pytest.fixture + def dbt(self) -> dbtRunner: + return dbtRunner() + + def test_group_invalid_option(self, dbt: dbtRunner) -> None: + res = dbt.invoke(["--invalid-option"]) + assert type(res.exception) == DbtUsageException + + def test_command_invalid_option(self, dbt: dbtRunner) -> None: + res = dbt.invoke(["deps", "--invalid-option"]) + assert type(res.exception) == DbtUsageException + + def test_command_mutually_exclusive_option(self, dbt: dbtRunner) -> None: + res = dbt.invoke(["--warn-error", "--warn-error-options", '{"include": "all"}', "deps"]) + assert type(res.exception) == DbtUsageException + res = dbt.invoke(["deps", "--warn-error", "--warn-error-options", '{"include": "all"}']) + assert type(res.exception) == DbtUsageException + + def test_invalid_command(self, dbt: dbtRunner) -> None: + res = dbt.invoke(["invalid-command"]) + assert type(res.exception) == DbtUsageException + + def test_invoke_version(self, dbt: dbtRunner) -> None: + dbt.invoke(["--version"]) + + def test_callbacks(self) -> None: + mock_callback = mock.MagicMock() + dbt = dbtRunner(callbacks=[mock_callback]) + # the `debug` command is one of the few commands wherein you don't need + # to have a project to run it and it will emit events + dbt.invoke(["debug"]) + mock_callback.assert_called() + + def test_invoke_kwargs(self, project, dbt): + res = dbt.invoke( + ["run"], + log_format="json", + log_path="some_random_path", + version_check=False, + profile_name="some_random_profile_name", + target_dir="some_random_target_dir", + ) + assert res.result.args["log_format"] == "json" + assert res.result.args["log_path"] == "some_random_path" + assert res.result.args["version_check"] is False + assert res.result.args["profile_name"] == "some_random_profile_name" + assert res.result.args["target_dir"] == "some_random_target_dir" + + def test_invoke_kwargs_project_dir(self, project, dbt): + res = dbt.invoke(["run"], project_dir="some_random_project_dir") + assert type(res.exception) == DbtProjectError + + msg = "No dbt_project.yml found at expected path some_random_project_dir" + assert msg in res.exception.msg + + def test_invoke_kwargs_profiles_dir(self, project, dbt): + res = dbt.invoke(["run"], profiles_dir="some_random_profiles_dir") + assert type(res.exception) == DbtProjectError + msg = "Could not find profile named 'test'" + assert msg in res.exception.msg + + def test_invoke_kwargs_and_flags(self, project, dbt): + res = dbt.invoke(["--log-format=text", "run"], log_format="json") + assert res.result.args["log_format"] == "json" diff --git a/tests/functional/test_default_selectors.py b/tests/functional/test_default_selectors.py new file mode 100644 index 000000000..b60581ded --- /dev/null +++ b/tests/functional/test_default_selectors.py @@ -0,0 +1,99 @@ +from dbt.tests.util import run_dbt +import pytest + + +models__schema_yml = """ +version: 2 + +sources: + - name: src + schema: "{{ target.schema }}" + freshness: + warn_after: {count: 24, period: hour} + loaded_at_field: _loaded_at + tables: + - name: source_a + identifier: model_c + columns: + - name: fun + - name: _loaded_at + - name: src + schema: "{{ target.schema }}" + freshness: + warn_after: {count: 24, period: hour} + loaded_at_field: _loaded_at + tables: + - name: source_b + identifier: model_c + columns: + - name: fun + - name: _loaded_at + +models: + - name: model_a + columns: + - name: fun + tags: [marketing] + - name: model_b + columns: + - name: fun + tags: [finance] +""" + +models__model_a_sql = """ +SELECT 1 AS fun +""" + +models__model_b_sql = """ +SELECT 1 AS fun +""" + +seeds__model_c_csv = """fun,_loaded_at +1,2021-04-19 01:00:00""" + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "model_b.sql": models__model_b_sql, + "model_a.sql": models__model_a_sql, + } + + +@pytest.fixture(scope="class") +def seeds(): + return {"model_c.csv": seeds__model_c_csv} + + +@pytest.fixture(scope="class") +def selectors(): + return """ + selectors: + - name: default_selector + description: test default selector + definition: + union: + - method: source + value: "test.src.source_a" + - method: fqn + value: "model_a" + default: true + """ + + +class TestDefaultSelectors: + def test_model__list(self, project): + result = run_dbt(["ls", "--resource-type", "model"]) + assert "test.model_a" in result + + def test_model__compile(self, project): + result = run_dbt(["compile"]) + assert len(result) == 1 + assert result.results[0].node.name == "model_a" + + def test_source__freshness(self, project): + run_dbt(["seed", "-s", "test.model_c"]) + result = run_dbt(["source", "freshness"]) + assert len(result) == 1 + assert result.results[0].node.name == "source_a" diff --git a/tests/functional/test_events.py b/tests/functional/test_events.py new file mode 100644 index 000000000..e43743ed1 --- /dev/null +++ b/tests/functional/test_events.py @@ -0,0 +1,33 @@ +import os + +from dbt.cli.main import dbtRunner +from dbt_common.events.base_types import EventLevel + + +def test_performance_report(project): + + resource_report_level = None + + def check_for_report(e): + # If we see a ResourceReport event, record its level + if e.info.name == "ResourceReport": + nonlocal resource_report_level + resource_report_level = e.info.level + + runner = dbtRunner(callbacks=[check_for_report]) + + runner.invoke(["run"]) + + # With not cli flag or env var set, ResourceReport should be debug level. + assert resource_report_level == EventLevel.DEBUG + + try: + os.environ["DBT_SHOW_RESOURCE_REPORT"] = "1" + runner.invoke(["run"]) + + # With the appropriate env var set, ResourceReport should be info level. + # This allows this fairly technical log line to be omitted by default + # but still available in production scenarios. + assert resource_report_level == EventLevel.INFO + finally: + del os.environ["DBT_SHOW_RESOURCE_REPORT"] diff --git a/tests/functional/test_experimental_parser.py b/tests/functional/test_experimental_parser.py new file mode 100644 index 000000000..b30119147 --- /dev/null +++ b/tests/functional/test_experimental_parser.py @@ -0,0 +1,303 @@ +import os + +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import RefArgs +from dbt.tests.util import run_dbt, run_dbt_and_capture +import pytest + + +def get_manifest(): + path = "./target/partial_parse.msgpack" + if os.path.exists(path): + with open(path, "rb") as fp: + manifest_mp = fp.read() + manifest: Manifest = Manifest.from_msgpack(manifest_mp) + return manifest + else: + return None + + +basic__schema_yml = """ +version: 2 + +sources: + - name: my_src + schema: "{{ target.schema }}" + tables: + - name: my_tbl + +models: + - name: model_a + columns: + - name: fun + +""" + +basic__model_a_sql = """ +{{ config(tags='hello', x=False) }} +{{ config(tags='world', x=True) }} + +select * from {{ ref('model_b') }} +cross join {{ source('my_src', 'my_tbl') }} +where false as boop + +""" + +basic__model_b_sql = """ +select 1 as fun +""" + + +ref_macro__schema_yml = """ +version: 2 + +""" + +ref_macro__models__model_a_sql = """ +select 1 as id + +""" + +source_macro__macros__source_sql = """ +{% macro source(source_name, table_name) %} + +{% endmacro %} +""" + +source_macro__schema_yml = """ +version: 2 + +""" + +source_macro__models__model_a_sql = """ +select 1 as id + +""" + +config_macro__macros__config_sql = """ +{% macro config() %} + +{% endmacro %} +""" + +config_macro__schema_yml = """ +version: 2 + +""" + +config_macro__models__model_a_sql = """ +select 1 as id + +""" + + +class BasicExperimentalParser: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": basic__model_a_sql, + "model_b.sql": basic__model_b_sql, + "schema.yml": basic__schema_yml, + } + + +class TestBasicExperimentalParserFlag(BasicExperimentalParser): + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + os.environ["DBT_USE_EXPERIMENTAL_PARSER"] = "true" + yield + del os.environ["DBT_USE_EXPERIMENTAL_PARSER"] + + def test_env_use_experimental_parser(self, project): + _, log_output = run_dbt_and_capture(["--debug", "parse"]) + + # successful stable static parsing + assert not ("1699: " in log_output) + # successful experimental static parsing + assert "1698: " in log_output + # experimental parser failed + assert not ("1604: " in log_output) + # static parser failed + assert not ("1603: " in log_output) + # jinja rendering + assert not ("1602: " in log_output) + + +class TestBasicStaticParserFlag(BasicExperimentalParser): + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + os.environ["DBT_STATIC_PARSER"] = "false" + yield + del os.environ["DBT_STATIC_PARSER"] + + def test_env_static_parser(self, project): + _, log_output = run_dbt_and_capture(["--debug", "parse"]) + + print(log_output) + + # jinja rendering because of --no-static-parser + assert "1605: " in log_output + # successful stable static parsing + assert not ("1699: " in log_output) + # successful experimental static parsing + assert not ("1698: " in log_output) + # experimental parser failed + assert not ("1604: " in log_output) + # static parser failed + assert not ("1603: " in log_output) + # fallback jinja rendering + assert not ("1602: " in log_output) + + +class TestBasicExperimentalParser(BasicExperimentalParser): + # test that the experimental parser extracts some basic ref, source, and config calls. + def test_experimental_parser_basic( + self, + project, + ): + run_dbt(["--use-experimental-parser", "parse"]) + manifest = get_manifest() + node = manifest.nodes["model.test.model_a"] + assert node.refs == [RefArgs(name="model_b")] + assert node.sources == [["my_src", "my_tbl"]] + assert node.config._extra == {"x": True} + assert node.config.tags == ["hello", "world"] + + +class TestBasicStaticParser(BasicExperimentalParser): + # test that the static parser extracts some basic ref, source, and config calls by default + # without the experimental flag and without rendering jinja + def test_static_parser_basic(self, project): + _, log_output = run_dbt_and_capture(["--debug", "parse"]) + + # successful stable static parsing + assert "1699: " in log_output + # successful experimental static parsing + assert not ("1698: " in log_output) + # experimental parser failed + assert not ("1604: " in log_output) + # static parser failed + assert not ("1603: " in log_output) + # jinja rendering + assert not ("1602: " in log_output) + + manifest = get_manifest() + node = manifest.nodes["model.test.model_a"] + assert node.refs == [RefArgs(name="model_b")] + assert node.sources == [["my_src", "my_tbl"]] + assert node.config._extra == {"x": True} + assert node.config.tags == ["hello", "world"] + + +class TestBasicNoStaticParser(BasicExperimentalParser): + # test that the static parser doesn't run when the flag is set + def test_static_parser_is_disabled(self, project): + _, log_output = run_dbt_and_capture(["--debug", "--no-static-parser", "parse"]) + + # jinja rendering because of --no-static-parser + assert "1605: " in log_output + # successful stable static parsing + assert not ("1699: " in log_output) + # successful experimental static parsing + assert not ("1698: " in log_output) + # experimental parser failed + assert not ("1604: " in log_output) + # static parser failed + assert not ("1603: " in log_output) + # fallback jinja rendering + assert not ("1602: " in log_output) + + +class TestRefOverrideExperimentalParser: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": ref_macro__models__model_a_sql, + "schema.yml": ref_macro__schema_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "source.sql": source_macro__macros__source_sql, + } + + # test that the experimental parser doesn't run if the ref built-in is overriden with a macro + def test_experimental_parser_ref_override( + self, + project, + ): + _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) + + print(log_output) + + # successful experimental static parsing + assert not ("1698: " in log_output) + # fallback to jinja rendering + assert "1602: " in log_output + # experimental parser failed + assert not ("1604: " in log_output) + # didn't run static parser because dbt detected a built-in macro override + assert "1601: " in log_output + + +class TestSourceOverrideExperimentalParser: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": source_macro__models__model_a_sql, + "schema.yml": source_macro__schema_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "source.sql": source_macro__macros__source_sql, + } + + # test that the experimental parser doesn't run if the source built-in is overriden with a macro + def test_experimental_parser_source_override( + self, + project, + ): + _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) + + # successful experimental static parsing + assert not ("1698: " in log_output) + # fallback to jinja rendering + assert "1602: " in log_output + # experimental parser failed + assert not ("1604: " in log_output) + # didn't run static parser because dbt detected a built-in macro override + assert "1601: " in log_output + + +class TestConfigOverrideExperimentalParser: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": config_macro__models__model_a_sql, + "schema.yml": config_macro__schema_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return { + "config.sql": config_macro__macros__config_sql, + } + + # test that the experimental parser doesn't run if the config built-in is overriden with a macro + def test_experimental_parser_config_override( + self, + project, + ): + _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) + + # successful experimental static parsing + assert not ("1698: " in log_output) + # fallback to jinja rendering + assert "1602: " in log_output + # experimental parser failed + assert not ("1604: " in log_output) + # didn't run static parser because dbt detected a built-in macro override + assert "1601: " in log_output diff --git a/tests/functional/test_external_reference.py b/tests/functional/test_external_reference.py new file mode 100644 index 000000000..d4b980e08 --- /dev/null +++ b/tests/functional/test_external_reference.py @@ -0,0 +1,58 @@ +from dbt.tests.util import run_dbt +import pytest + + +external_model_sql = """ +{{ + config( + materialized = "view" + ) +}} + +select * from "{{ this.schema + 'z' }}"."external" +""" + +model_sql = """ +select 1 as id +""" + + +class TestExternalReference: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": external_model_sql} + + def test_external_reference(self, project, unique_schema): + external_schema = unique_schema + "z" + project.run_sql(f'create schema "{external_schema}"') + project.run_sql(f'create table "{external_schema}"."external" (id integer)') + project.run_sql(f'insert into "{external_schema}"."external" values (1), (2)') + + results = run_dbt(["run"]) + assert len(results) == 1 + + # running it again should succeed + results = run_dbt(["run"]) + assert len(results) == 1 + + +# The opposite of the test above -- check that external relations that +# depend on a dbt model do not create issues with caching +class TestExternalDependency: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": model_sql} + + def test_external_reference(self, project, unique_schema): + results = run_dbt(["run"]) + assert len(results) == 1 + + external_schema = unique_schema + "z" + project.run_sql(f'create schema "{external_schema}"') + project.run_sql( + f'create view "{external_schema}"."external" as (select * from {unique_schema}.model)' + ) + + # running it again should succeed + results = run_dbt(["run"]) + assert len(results) == 1 diff --git a/tests/functional/test_fail_fast.py b/tests/functional/test_fail_fast.py new file mode 100644 index 000000000..e60b70d1d --- /dev/null +++ b/tests/functional/test_fail_fast.py @@ -0,0 +1,64 @@ +import json +from pathlib import Path + +from dbt.tests.util import run_dbt +import pytest + + +models__one_sql = """ +select 1 +""" + +models__two_sql = """ +-- depends_on: {{ ref('one') }} +select 1 /failed +""" + + +class FailFastBase: + @pytest.fixture(scope="class") + def models(self): + return {"one.sql": models__one_sql, "two.sql": models__two_sql} + + +class TestFastFailingDuringRun(FailFastBase): + def test_fail_fast_run( + self, + project, + models, # noqa: F811 + ): + res = run_dbt(["run", "--fail-fast", "--threads", "1"], expect_pass=False) + assert {r.node.unique_id: r.status for r in res.results} == { + "model.test.one": "success", + "model.test.two": "error", + } + + run_results_file = Path(project.project_root) / "target/run_results.json" + assert run_results_file.is_file() + with run_results_file.open() as run_results_str: + run_results = json.loads(run_results_str.read()) + assert len(run_results["results"]) == 2 + assert run_results["results"][0]["status"] == "success" + assert run_results["results"][1]["status"] == "error" + + +class TestFailFastFromConfig(FailFastBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "send_anonymous_usage_stats": False, + "fail_fast": True, + } + } + + def test_fail_fast_run_project_flags( + self, + project, + models, # noqa: F811 + ): + res = run_dbt(["run", "--threads", "1"], expect_pass=False) + assert {r.node.unique_id: r.status for r in res.results} == { + "model.test.one": "success", + "model.test.two": "error", + } diff --git a/tests/functional/test_init.py b/tests/functional/test_init.py new file mode 100644 index 000000000..4d29db086 --- /dev/null +++ b/tests/functional/test_init.py @@ -0,0 +1,845 @@ +import os +from pathlib import Path +from unittest.mock import Mock, call, patch + +import click +from dbt_common.exceptions import DbtRuntimeError +from dbt.tests.util import run_dbt +import pytest +import yaml + + +class TestInitProjectWithExistingProfilesYml: + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_task_in_project_with_existing_profiles_yml( + self, mock_prompt, mock_confirm, mock_get_adapter, project + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + manager.confirm.side_effect = ["y"] + manager.prompt.side_effect = [ + 1, + "localhost", + 5432, + "test_user", + "test_password", + "test_db", + "test_schema", + 4, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + run_dbt(["init"]) + + manager.assert_has_calls( + [ + call.confirm( + f"The profile test already exists in {os.path.join(project.profiles_dir, 'profiles.yml')}. Continue and overwrite it?" + ), + call.prompt( + "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", + type=click.INT, + ), + call.prompt( + "host (hostname for the instance)", default=None, hide_input=False, type=None + ), + call.prompt("port", default=5432, hide_input=False, type=click.INT), + call.prompt("user (dev username)", default=None, hide_input=False, type=None), + call.prompt("pass (dev password)", default=None, hide_input=True, type=None), + call.prompt( + "dbname (default database that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt( + "schema (default schema that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), + ] + ) + + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert ( + f.read() + == """test: + outputs: + dev: + dbname: test_db + host: localhost + pass: test_password + port: 5432 + schema: test_schema + threads: 4 + type: postgres + user: test_user + target: dev +""" + ) + + def test_init_task_in_project_specifying_profile_errors(self): + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "test"], expect_pass=False) + assert "Can not init existing project with specified profile" in str(error) + + +class TestInitProjectWithoutExistingProfilesYml: + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.prompt") + @patch.object(Path, "exists", autospec=True) + def test_init_task_in_project_without_existing_profiles_yml( + self, exists, mock_prompt, mock_get_adapter, project + ): + def exists_side_effect(path): + # Override responses on specific files, default to 'real world' if not overriden + return {"profiles.yml": False}.get(path.name, os.path.exists(path)) + + exists.side_effect = exists_side_effect + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + 1, + "localhost", + 5432, + "test_user", + "test_password", + "test_db", + "test_schema", + 4, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + run_dbt(["init"]) + + manager.assert_has_calls( + [ + call.prompt( + "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", + type=click.INT, + ), + call.prompt( + "host (hostname for the instance)", default=None, hide_input=False, type=None + ), + call.prompt("port", default=5432, hide_input=False, type=click.INT), + call.prompt("user (dev username)", default=None, hide_input=False, type=None), + call.prompt("pass (dev password)", default=None, hide_input=True, type=None), + call.prompt( + "dbname (default database that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt( + "schema (default schema that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), + ] + ) + + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert ( + f.read() + == """test: + outputs: + dev: + dbname: test_db + host: localhost + pass: test_password + port: 5432 + schema: test_schema + threads: 4 + type: postgres + user: test_user + target: dev +""" + ) + + @patch.object(Path, "exists", autospec=True) + def test_init_task_in_project_without_profile_yml_specifying_profile_errors(self, exists): + def exists_side_effect(path): + # Override responses on specific files, default to 'real world' if not overriden + return {"profiles.yml": False}.get(path.name, os.path.exists(path)) + + exists.side_effect = exists_side_effect + + # Even through no profiles.yml file exists, the init will not modify project.yml, + # so this errors + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "test"], expect_pass=False) + assert "Could not find profile named test" in str(error) + + +class TestInitProjectWithoutExistingProfilesYmlOrTemplate: + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + @patch.object(Path, "exists", autospec=True) + def test_init_task_in_project_without_existing_profiles_yml_or_profile_template( + self, exists, mock_prompt, mock_confirm, mock_get_adapter, project + ): + def exists_side_effect(path): + # Override responses on specific files, default to 'real world' if not overriden + return { + "profiles.yml": False, + "profile_template.yml": False, + }.get(path.name, os.path.exists(path)) + + exists.side_effect = exists_side_effect + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + manager.prompt.side_effect = [ + 1, + ] + mock_get_adapter.return_value = [project.adapter.type()] + run_dbt(["init"]) + manager.assert_has_calls( + [ + call.prompt( + "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", + type=click.INT, + ), + ] + ) + + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert ( + f.read() + == """test: + outputs: + + dev: + type: postgres + threads: [1 or more] + host: [host] + port: [port] + user: [dev_username] + pass: [dev_password] + dbname: [dbname] + schema: [dev_schema] + + prod: + type: postgres + threads: [1 or more] + host: [host] + port: [port] + user: [prod_username] + pass: [prod_password] + dbname: [dbname] + schema: [prod_schema] + + target: dev +""" + ) + + +class TestInitProjectWithProfileTemplateWithoutExistingProfilesYml: + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + @patch.object(Path, "exists", autospec=True) + def test_init_task_in_project_with_profile_template_without_existing_profiles_yml( + self, exists, mock_prompt, mock_confirm, mock_get_adapter, project + ): + def exists_side_effect(path): + # Override responses on specific files, default to 'real world' if not overriden + return { + "profiles.yml": False, + }.get(path.name, os.path.exists(path)) + + exists.side_effect = exists_side_effect + + with open("profile_template.yml", "w") as f: + f.write( + """fixed: + type: postgres + threads: 4 + host: localhost + dbname: my_db + schema: my_schema + target: my_target +prompts: + target: + hint: 'The target name' + type: string + port: + hint: 'The port (for integer test purposes)' + type: int + default: 5432 + user: + hint: 'Your username' + pass: + hint: 'Your password' + hide_input: true""" + ) + + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + manager.prompt.side_effect = ["my_target", 5432, "test_username", "test_password"] + mock_get_adapter.return_value = [project.adapter.type()] + run_dbt(["init"]) + manager.assert_has_calls( + [ + call.prompt( + "target (The target name)", default=None, hide_input=False, type=click.STRING + ), + call.prompt( + "port (The port (for integer test purposes))", + default=5432, + hide_input=False, + type=click.INT, + ), + call.prompt("user (Your username)", default=None, hide_input=False, type=None), + call.prompt("pass (Your password)", default=None, hide_input=True, type=None), + ] + ) + + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert ( + f.read() + == """test: + outputs: + my_target: + dbname: my_db + host: localhost + pass: test_password + port: 5432 + schema: my_schema + threads: 4 + type: postgres + user: test_username + target: my_target +""" + ) + + +class TestInitInvalidProfileTemplate: + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_task_in_project_with_invalid_profile_template( + self, mock_prompt, mock_confirm, mock_get_adapter, project + ): + """Test that when an invalid profile_template.yml is provided in the project, + init command falls back to the target's profile_template.yml""" + with open(os.path.join(project.project_root, "profile_template.yml"), "w") as f: + f.write("""invalid template""") + + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + manager.confirm.side_effect = ["y"] + manager.prompt.side_effect = [ + 1, + "localhost", + 5432, + "test_username", + "test_password", + "test_db", + "test_schema", + 4, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + run_dbt(["init"]) + + manager.assert_has_calls( + [ + call.confirm( + f"The profile test already exists in {os.path.join(project.profiles_dir, 'profiles.yml')}. Continue and overwrite it?" + ), + call.prompt( + "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", + type=click.INT, + ), + call.prompt( + "host (hostname for the instance)", default=None, hide_input=False, type=None + ), + call.prompt("port", default=5432, hide_input=False, type=click.INT), + call.prompt("user (dev username)", default=None, hide_input=False, type=None), + call.prompt("pass (dev password)", default=None, hide_input=True, type=None), + call.prompt( + "dbname (default database that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt( + "schema (default schema that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), + ] + ) + + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert ( + f.read() + == """test: + outputs: + dev: + dbname: test_db + host: localhost + pass: test_password + port: 5432 + schema: test_schema + threads: 4 + type: postgres + user: test_username + target: dev +""" + ) + + +class TestInitInsideOfProjectBase: + @pytest.fixture(scope="class") + def project_name(self, unique_schema): + return f"my_project_{unique_schema}" + + +class TestInitOutsideOfProjectBase: + @pytest.fixture(scope="class") + def project_name(self, unique_schema): + return f"my_project_{unique_schema}" + + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + # Start by removing the dbt_project.yml so that we're not in an existing project + os.remove(os.path.join(project.project_root, "dbt_project.yml")) + + +class TestInitOutsideOfProject(TestInitOutsideOfProjectBase): + @pytest.fixture(scope="class") + def dbt_profile_data(self, unique_schema): + return { + "test": { + "outputs": { + "default2": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": os.getenv("POSTGRES_TEST_USER", "root"), + "pass": os.getenv("POSTGRES_TEST_PASS", "password"), + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema, + }, + "noaccess": { + "type": "postgres", + "threads": 4, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": "noaccess", + "pass": "password", + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema, + }, + }, + "target": "default2", + }, + } + + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_task_outside_of_project( + self, mock_prompt, mock_confirm, mock_get_adapter, project, project_name, unique_schema + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + manager.prompt.side_effect = [ + project_name, + 1, + "localhost", + 5432, + "test_username", + "test_password", + "test_db", + "test_schema", + 4, + ] + mock_get_adapter.return_value = [project.adapter.type()] + run_dbt(["init"]) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + call.prompt( + "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", + type=click.INT, + ), + call.prompt( + "host (hostname for the instance)", default=None, hide_input=False, type=None + ), + call.prompt("port", default=5432, hide_input=False, type=click.INT), + call.prompt("user (dev username)", default=None, hide_input=False, type=None), + call.prompt("pass (dev password)", default=None, hide_input=True, type=None), + call.prompt( + "dbname (default database that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt( + "schema (default schema that dbt will build objects in)", + default=None, + hide_input=False, + type=None, + ), + call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), + ] + ) + + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert ( + f.read() + == f"""{project_name}: + outputs: + dev: + dbname: test_db + host: localhost + pass: test_password + port: 5432 + schema: test_schema + threads: 4 + type: postgres + user: test_username + target: dev +test: + outputs: + default2: + dbname: dbt + host: localhost + pass: password + port: 5432 + schema: {unique_schema} + threads: 4 + type: postgres + user: root + noaccess: + dbname: dbt + host: localhost + pass: password + port: 5432 + schema: {unique_schema} + threads: 4 + type: postgres + user: noaccess + target: default2 +""" + ) + + with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: + assert ( + f.read() + == f""" +# Name your project! Project names should contain only lowercase characters +# and underscores. A good package name should reflect your organization's +# name or the intended use of these models +name: '{project_name}' +version: '1.0.0' + +# This setting configures which "profile" dbt uses for this project. +profile: '{project_name}' + +# These configurations specify where dbt should look for different types of files. +# The `model-paths` config, for example, states that models in this project can be +# found in the "models/" directory. You probably won't need to change these! +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + + +# Configuring models +# Full documentation: https://docs.getdbt.com/docs/configuring-models + +# In this example config, we tell dbt to build all models in the example/ +# directory as views. These settings can be overridden in the individual model +# files using the `{{{{ config(...) }}}}` macro. +models: + {project_name}: + # Config indicated by + and applies to all files under models/example/ + example: + +materialized: view +""" + ) + + +class TestInitInvalidProjectNameCLI(TestInitOutsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_invalid_project_name_cli( + self, mock_prompt, mock_confirm, mock_get_adapter, project_name, project + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + + invalid_name = "name-with-hyphen" + valid_name = project_name + manager.prompt.side_effect = [valid_name] + mock_get_adapter.return_value = [project.adapter.type()] + + run_dbt(["init", invalid_name, "--skip-profile-setup"]) + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) + + +class TestInitInvalidProjectNamePrompt(TestInitOutsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_invalid_project_name_prompt( + self, mock_prompt, mock_confirm, mock_get_adapter, project_name, project + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + + invalid_name = "name-with-hyphen" + valid_name = project_name + manager.prompt.side_effect = [invalid_name, valid_name] + mock_get_adapter.return_value = [project.adapter.type()] + + run_dbt(["init", "--skip-profile-setup"]) + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) + + +class TestInitProvidedProjectNameAndSkipProfileSetup(TestInitOutsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_provided_project_name_and_skip_profile_setup( + self, mock_prompt, mock_confirm, mock_get, project, project_name + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + manager.prompt.side_effect = [ + 1, + "localhost", + 5432, + "test_username", + "test_password", + "test_db", + "test_schema", + 4, + ] + mock_get.return_value = [project.adapter.type()] + + # provide project name through the init command + run_dbt(["init", project_name, "--skip-profile-setup"]) + assert len(manager.mock_calls) == 0 + + with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: + assert ( + f.read() + == f""" +# Name your project! Project names should contain only lowercase characters +# and underscores. A good package name should reflect your organization's +# name or the intended use of these models +name: '{project_name}' +version: '1.0.0' + +# This setting configures which "profile" dbt uses for this project. +profile: '{project_name}' + +# These configurations specify where dbt should look for different types of files. +# The `model-paths` config, for example, states that models in this project can be +# found in the "models/" directory. You probably won't need to change these! +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + + +# Configuring models +# Full documentation: https://docs.getdbt.com/docs/configuring-models + +# In this example config, we tell dbt to build all models in the example/ +# directory as views. These settings can be overridden in the individual model +# files using the `{{{{ config(...) }}}}` macro. +models: + {project_name}: + # Config indicated by + and applies to all files under models/example/ + example: + +materialized: view +""" + ) + + +class TestInitInsideProjectAndSkipProfileSetup(TestInitInsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.confirm") + @patch("click.prompt") + def test_init_inside_project_and_skip_profile_setup( + self, mock_prompt, mock_confirm, mock_get, project, project_name + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.attach_mock(mock_confirm, "confirm") + + assert Path("dbt_project.yml").exists() + + # skip interactive profile setup + run_dbt(["init", "--skip-profile-setup"]) + assert len(manager.mock_calls) == 0 + + +class TestInitOutsideOfProjectWithSpecifiedProfile(TestInitOutsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.prompt") + def test_init_task_outside_of_project_with_specified_profile( + self, mock_prompt, mock_get_adapter, project, project_name, unique_schema, dbt_profile_data + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + project_name, + ] + mock_get_adapter.return_value = [project.adapter.type()] + run_dbt(["init", "--profile", "test"]) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) + + # profiles.yml is NOT overwritten, so assert that the text matches that of the + # original fixture + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert f.read() == yaml.safe_dump(dbt_profile_data) + + with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: + assert ( + f.read() + == f""" +# Name your project! Project names should contain only lowercase characters +# and underscores. A good package name should reflect your organization's +# name or the intended use of these models +name: '{project_name}' +version: '1.0.0' + +# This setting configures which "profile" dbt uses for this project. +profile: 'test' + +# These configurations specify where dbt should look for different types of files. +# The `model-paths` config, for example, states that models in this project can be +# found in the "models/" directory. You probably won't need to change these! +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + + +# Configuring models +# Full documentation: https://docs.getdbt.com/docs/configuring-models + +# In this example config, we tell dbt to build all models in the example/ +# directory as views. These settings can be overridden in the individual model +# files using the `{{{{ config(...) }}}}` macro. +models: + {project_name}: + # Config indicated by + and applies to all files under models/example/ + example: + +materialized: view +""" + ) + + +class TestInitOutsideOfProjectSpecifyingInvalidProfile(TestInitOutsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.prompt") + def test_init_task_outside_project_specifying_invalid_profile_errors( + self, mock_prompt, mock_get_adapter, project, project_name + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + project_name, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "invalid"], expect_pass=False) + assert "Could not find profile named invalid" in str(error) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) + + +class TestInitOutsideOfProjectSpecifyingProfileNoProfilesYml(TestInitOutsideOfProjectBase): + @patch("dbt.task.init._get_adapter_plugin_names") + @patch("click.prompt") + def test_init_task_outside_project_specifying_profile_no_profiles_yml_errors( + self, mock_prompt, mock_get_adapter, project, project_name + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + project_name, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + # Override responses on specific files, default to 'real world' if not overriden + original_isfile = os.path.isfile + with patch( + "os.path.isfile", + new=lambda path: {"profiles.yml": False}.get( + os.path.basename(path), original_isfile(path) + ), + ): + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "test"], expect_pass=False) + assert "Could not find profile named invalid" in str(error) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) diff --git a/tests/functional/test_ref_override.py b/tests/functional/test_ref_override.py new file mode 100644 index 000000000..a1bbd0fae --- /dev/null +++ b/tests/functional/test_ref_override.py @@ -0,0 +1,145 @@ +from dbt.tests.util import check_relations_equal, run_dbt +import pytest + + +models__ref_override_sql = """ +select + * +from {{ ref('seed_1') }} +""" + +macros__ref_override_macro_sql = """ +-- Macro to override ref and always return the same result +{% macro ref(modelname) %} +{% do return(builtins.ref(modelname).replace_path(identifier='seed_2')) %} +{% endmacro %} +""" + +seeds__seed_2_csv = """a,b +6,2 +12,4 +18,6""" + +seeds__seed_1_csv = """a,b +1,2 +2,4 +3,6""" + + +class TestRefOverride: + @pytest.fixture(scope="class") + def models(self): + return {"ref_override.sql": models__ref_override_sql} + + @pytest.fixture(scope="class") + def macros(self): + return {"ref_override_macro.sql": macros__ref_override_macro_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_2.csv": seeds__seed_2_csv, "seed_1.csv": seeds__seed_1_csv} + + def test_ref_override( + self, + project, + ): + run_dbt(["seed"]) + run_dbt(["run"]) + + # We want it to equal seed_2 and not seed_1. If it's + # still pointing at seed_1 then the override hasn't worked. + check_relations_equal(project.adapter, ["ref_override", "seed_2"]) + + +models__version_ref_override_sql = """ +select + * +from {{ ref('versioned_model', version=1) }} +""" + +models__package_ref_override_sql = """ +select + * +from {{ ref('package', 'versioned_model') }} +""" + +models__package_version_ref_override_sql = """ +select + * +from {{ ref('package', 'versioned_model', version=1) }} +""" + +models__v1_sql = """ +select 1 +""" + +models__v2_sql = """ +select 2 +""" + +schema__versions_yml = """ +models: + - name: versioned_model + versions: + - v: 1 + - v: 2 +""" + +macros__package_version_ref_override_macro_sql = """ +-- Macro to override ref and always return the same result +{% macro ref() %} +-- extract user-provided positional and keyword arguments +{% set version = kwargs.get('version') %} +{% set packagename = none %} +{%- if (varargs | length) == 1 -%} + {% set modelname = varargs[0] %} +{%- else -%} + {% set packagename = varargs[0] %} + {% set modelname = varargs[1] %} +{% endif %} + +{%- set version_override = 2 -%} +{%- set packagename_override = 'test' -%} +-- call builtins.ref based on provided positional arguments +{% if packagename is not none %} + {% do return(builtins.ref(packagename_override, modelname, version=version_override)) %} +{% else %} + {% do return(builtins.ref(modelname, version=version_override)) %} +{% endif %} + +{% endmacro %} +""" + + +class TestAdvancedRefOverride: + @pytest.fixture(scope="class") + def models(self): + return { + "version_ref_override.sql": models__version_ref_override_sql, + "package_ref_override.sql": models__package_ref_override_sql, + "package_version_ref_override.sql": models__package_version_ref_override_sql, + "versioned_model_v1.sql": models__v1_sql, + "versioned_model_v2.sql": models__v2_sql, + "model.sql": models__v1_sql, + "schema.yml": schema__versions_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"ref_override_macro.sql": macros__package_version_ref_override_macro_sql} + + def test_ref_override( + self, + project, + ): + run_dbt(["run"]) + + # We want versioned_ref_override to equal to versioned_model_v2, otherwise the + # ref override macro has not worked + check_relations_equal(project.adapter, ["version_ref_override", "versioned_model_v2"]) + + check_relations_equal(project.adapter, ["package_ref_override", "versioned_model_v2"]) + + check_relations_equal( + project.adapter, ["package_version_ref_override", "versioned_model_v2"] + ) diff --git a/tests/functional/test_relation_name.py b/tests/functional/test_relation_name.py new file mode 100644 index 000000000..c7eb9d9d1 --- /dev/null +++ b/tests/functional/test_relation_name.py @@ -0,0 +1,126 @@ +from dbt.contracts.results import RunStatus +from dbt.tests.util import run_dbt +import pytest + + +# Test coverage: A relation is a name for a database entity, i.e. a table or view. Every relation has +# a name. These tests verify the default Postgres rules for relation names are followed. Adapters +# may override connection rules and thus may have their own tests. + +seeds__seed = """col_A,col_B +1,2 +3,4 +5,6 +""" + +models__basic_incremental = """ +select * from {{ this.schema }}.seed + +{{ + config({ + "unique_key": "col_A", + "materialized": "incremental" + }) +}} +""" + +models__basic_table = """ +select * from {{ this.schema }}.seed + +{{ + config({ + "materialized": "table" + }) +}} +""" + + +class TestGeneratedDDLNameRules: + @classmethod + def setup_class(self): + self.incremental_filename = "my_name_is_51_characters_incremental_abcdefghijklmn" + # length is 63 + self.max_length_filename = ( + "my_name_is_max_length_chars_abcdefghijklmnopqrstuvwxyz123456789" + ) + # length is 64 + self.over_max_length_filename = ( + "my_name_is_one_over_max_length_chats_abcdefghijklmnopqrstuvwxyz1" + ) + + self.filename_for_backup_file = "my_name_is_52_characters_abcdefghijklmnopqrstuvwxyz0" + + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + run_dbt(["seed"]) + + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": seeds__seed} + + @pytest.fixture(scope="class") + def models(self): + return { + f"{self.incremental_filename}.sql": models__basic_incremental, + f"{self.filename_for_backup_file}.sql": models__basic_table, + f"{self.max_length_filename}.sql": models__basic_table, + f"{self.over_max_length_filename}.sql": models__basic_table, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + } + + # Backup table name generation: + # 1. for len(relation name) <= 51, backfills + # 2. for len(relation name) > 51 characters, overwrites + # the last 12 characters with __dbt_backup + def test_name_shorter_or_equal_to_63_passes(self, project): + run_dbt( + [ + "run", + "-s", + f"{self.max_length_filename}", + f"{self.filename_for_backup_file}", + ], + ) + + def test_long_name_passes_when_temp_tables_are_generated(self): + run_dbt( + [ + "run", + "-s", + f"{self.incremental_filename}", + ], + ) + + # Run again to trigger incremental materialization + run_dbt( + [ + "run", + "-s", + f"{self.incremental_filename}", + ], + ) + + # 63 characters is the character limit for a table name in a postgres database + # (assuming compiled without changes from source) + def test_name_longer_than_63_does_not_build(self): + err_msg = ( + "Relation name 'my_name_is_one_over_max" + "_length_chats_abcdefghijklmnopqrstuvwxyz1' is longer than 63 characters" + ) + res = run_dbt( + [ + "run", + "-s", + self.over_max_length_filename, + ], + expect_pass=False, + ) + assert res[0].status == RunStatus.Error + assert err_msg in res[0].message diff --git a/tests/functional/test_selection/conftest.py b/tests/functional/test_selection/conftest.py new file mode 100644 index 000000000..2faa9e34b --- /dev/null +++ b/tests/functional/test_selection/conftest.py @@ -0,0 +1,96 @@ +from dbt.tests.fixtures.project import write_project_files +import pytest + + +tests__cf_a_b_sql = """ +select * from {{ ref('model_a') }} +cross join {{ ref('model_b') }} +where false +""" + +tests__cf_a_src_sql = """ +select * from {{ ref('model_a') }} +cross join {{ source('my_src', 'my_tbl') }} +where false +""" + +tests__just_a_sql = """ +{{ config(tags = ['data_test_tag']) }} + +select * from {{ ref('model_a') }} +where false +""" + +models__schema_yml = """ +version: 2 + +sources: + - name: my_src + schema: "{{ target.schema }}" + tables: + - name: my_tbl + identifier: model_b + columns: + - name: fun + data_tests: + - unique + +models: + - name: model_a + columns: + - name: fun + tags: [column_level_tag] + data_tests: + - unique + - relationships: + to: ref('model_b') + field: fun + tags: [test_level_tag] + - relationships: + to: source('my_src', 'my_tbl') + field: fun +""" + +models__model_b_sql = """ +{{ config( + tags = ['a_or_b'] +) }} + +select 1 as fun +""" + +models__model_a_sql = """ +{{ config( + tags = ['a_or_b'] +) }} + +select * FROM {{ref('model_b')}} +""" + + +@pytest.fixture(scope="class") +def tests(): + return { + "cf_a_b.sql": tests__cf_a_b_sql, + "cf_a_src.sql": tests__cf_a_src_sql, + "just_a.sql": tests__just_a_sql, + } + + +@pytest.fixture(scope="class") +def models(): + return { + "schema.yml": models__schema_yml, + "model_b.sql": models__model_b_sql, + "model_a.sql": models__model_a_sql, + } + + +@pytest.fixture(scope="class") +def project_files( + project_root, + tests, + models, +): + write_project_files(project_root, "tests", tests) + write_project_files(project_root, "models", models) diff --git a/tests/functional/test_selection/test_selection_expansion.py b/tests/functional/test_selection/test_selection_expansion.py new file mode 100644 index 000000000..d17f27d7f --- /dev/null +++ b/tests/functional/test_selection/test_selection_expansion.py @@ -0,0 +1,567 @@ +from dbt.tests.util import run_dbt +import pytest + + +class TestSelectionExpansion: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"config-version": 2, "test-paths": ["tests"]} + + def list_tests_and_assert( + self, + include, + exclude, + expected_tests, + indirect_selection="eager", + selector_name=None, + ): + list_args = ["ls", "--resource-type", "test"] + if include: + list_args.extend(("--select", include)) + if exclude: + list_args.extend(("--exclude", exclude)) + if indirect_selection: + list_args.extend(("--indirect-selection", indirect_selection)) + if selector_name: + list_args.extend(("--selector", selector_name)) + + listed = run_dbt(list_args) + assert len(listed) == len(expected_tests) + + test_names = [name.split(".")[-1] for name in listed] + assert sorted(test_names) == sorted(expected_tests) + + def run_tests_and_assert( + self, + include, + exclude, + expected_tests, + indirect_selection="eager", + selector_name=None, + ): + results = run_dbt(["run"]) + assert len(results) == 2 + + test_args = ["test"] + if include: + test_args.extend(("--models", include)) + if exclude: + test_args.extend(("--exclude", exclude)) + if indirect_selection: + test_args.extend(("--indirect-selection", indirect_selection)) + if selector_name: + test_args.extend(("--selector", selector_name)) + + results = run_dbt(test_args) + tests_run = [r.node.name for r in results] + assert len(tests_run) == len(expected_tests) + + assert sorted(tests_run) == sorted(expected_tests) + + def test_all_tests_no_specifiers( + self, + project, + ): + select = None + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "source_unique_my_src_my_tbl_fun", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_alone( + self, + project, + ): + select = "model_a" + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_model_b( + self, + project, + ): + select = "model_a model_b" + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "unique_model_a_fun", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_sources( + self, + project, + ): + select = "model_a source:*" + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "unique_model_a_fun", + "source_unique_my_src_my_tbl_fun", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_exclude_model_b( + self, + project, + ): + select = None + exclude = "model_b" + expected = [ + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "source_unique_my_src_my_tbl_fun", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_exclude_specific_test( + self, + project, + ): + select = "model_a" + exclude = "unique_model_a_fun" + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_exclude_specific_test_cautious( + self, + project, + ): + select = "model_a" + exclude = "unique_model_a_fun" + expected = ["just_a"] + indirect_selection = "cautious" + + self.list_tests_and_assert(select, exclude, expected, indirect_selection) + self.run_tests_and_assert(select, exclude, expected, indirect_selection) + + def test_model_a_exclude_specific_test_buildable( + self, + project, + ): + select = "model_a" + exclude = "unique_model_a_fun" + expected = [ + "just_a", + "cf_a_b", + "cf_a_src", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + ] + indirect_selection = "buildable" + + self.list_tests_and_assert(select, exclude, expected, indirect_selection) + self.run_tests_and_assert(select, exclude, expected, indirect_selection) + + def test_only_generic( + self, + project, + ): + select = "test_type:generic" + exclude = None + expected = [ + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "source_unique_my_src_my_tbl_fun", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_only_singular_unset( + self, + project, + ): + select = "model_a,test_type:singular" + exclude = None + expected = ["cf_a_b", "cf_a_src", "just_a"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_only_singular_eager( + self, + project, + ): + select = "model_a,test_type:singular" + exclude = None + expected = ["cf_a_b", "cf_a_src", "just_a"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_only_singular_cautious( + self, + project, + ): + select = "model_a,test_type:singular" + exclude = None + expected = ["just_a"] + indirect_selection = "cautious" + + self.list_tests_and_assert( + select, exclude, expected, indirect_selection=indirect_selection + ) + self.run_tests_and_assert(select, exclude, expected, indirect_selection=indirect_selection) + + def test_only_singular( + self, + project, + ): + select = "test_type:singular" + exclude = None + expected = ["cf_a_b", "cf_a_src", "just_a"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_only_singular( + self, + project, + ): + select = "model_a,test_type:singular" + exclude = None + expected = ["cf_a_b", "cf_a_src", "just_a"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_test_name_intersection( + self, + project, + ): + select = "model_a,test_name:unique" + exclude = None + expected = ["unique_model_a_fun"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_tag_test_name_intersection( + self, + project, + ): + select = "tag:a_or_b,test_name:relationships" + exclude = None + expected = [ + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_select_column_level_tag( + self, + project, + ): + select = "tag:column_level_tag" + exclude = None + expected = [ + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_exclude_column_level_tag( + self, + project, + ): + select = None + exclude = "tag:column_level_tag" + expected = ["cf_a_b", "cf_a_src", "just_a", "source_unique_my_src_my_tbl_fun"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_test_level_tag( + self, + project, + ): + select = "tag:test_level_tag" + exclude = None + expected = ["relationships_model_a_fun__fun__ref_model_b_"] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_exclude_data_test_tag( + self, + project, + ): + select = "model_a" + exclude = "tag:data_test_tag" + expected = [ + "cf_a_b", + "cf_a_src", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_indirect_selection( + self, + project, + ): + select = "model_a" + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert(select, exclude, expected) + self.run_tests_and_assert(select, exclude, expected) + + def test_model_a_indirect_selection_eager( + self, + project, + ): + select = "model_a" + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + indirect_selection = "eager" + + self.list_tests_and_assert(select, exclude, expected, indirect_selection) + self.run_tests_and_assert(select, exclude, expected, indirect_selection) + + def test_model_a_indirect_selection_cautious( + self, + project, + ): + select = "model_a" + exclude = None + expected = [ + "just_a", + "unique_model_a_fun", + ] + indirect_selection = "cautious" + + self.list_tests_and_assert(select, exclude, expected, indirect_selection) + self.run_tests_and_assert(select, exclude, expected, indirect_selection) + + def test_model_a_indirect_selection_buildable( + self, + project, + ): + select = "model_a" + exclude = None + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + indirect_selection = "buildable" + + self.list_tests_and_assert(select, exclude, expected, indirect_selection) + self.run_tests_and_assert(select, exclude, expected, indirect_selection) + + def test_model_a_indirect_selection_exclude_unique_tests( + self, + project, + ): + select = "model_a" + exclude = "test_name:unique" + indirect_selection = "eager" + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + ] + + self.list_tests_and_assert(select, exclude, expected, indirect_selection) + self.run_tests_and_assert(select, exclude, expected, indirect_selection=indirect_selection) + + def test_model_a_indirect_selection_empty(self, project): + results = run_dbt(["ls", "--indirect-selection", "empty", "--select", "model_a"]) + assert len(results) == 1 + + +class TestExpansionWithSelectors(TestSelectionExpansion): + @pytest.fixture(scope="class") + def selectors(self): + return """ + selectors: + - name: model_a_unset_indirect_selection + definition: + method: fqn + value: model_a + - name: model_a_cautious_indirect_selection + definition: + method: fqn + value: model_a + indirect_selection: "cautious" + - name: model_a_eager_indirect_selection + definition: + method: fqn + value: model_a + indirect_selection: "eager" + - name: model_a_buildable_indirect_selection + definition: + method: fqn + value: model_a + indirect_selection: "buildable" + """ + + def test_selector_model_a_unset_indirect_selection( + self, + project, + ): + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_unset_indirect_selection", + ) + self.run_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_unset_indirect_selection", + ) + + def test_selector_model_a_cautious_indirect_selection( + self, + project, + ): + expected = ["just_a", "unique_model_a_fun"] + + self.list_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_cautious_indirect_selection", + ) + self.run_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_cautious_indirect_selection", + ) + + def test_selector_model_a_eager_indirect_selection( + self, + project, + ): + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_eager_indirect_selection", + ) + self.run_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_eager_indirect_selection", + ) + + def test_selector_model_a_buildable_indirect_selection( + self, + project, + ): + expected = [ + "cf_a_b", + "cf_a_src", + "just_a", + "relationships_model_a_fun__fun__ref_model_b_", + "relationships_model_a_fun__fun__source_my_src_my_tbl_", + "unique_model_a_fun", + ] + + self.list_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_buildable_indirect_selection", + ) + self.run_tests_and_assert( + include=None, + exclude=None, + expected_tests=expected, + selector_name="model_a_buildable_indirect_selection", + ) diff --git a/tests/functional/test_severity.py b/tests/functional/test_severity.py new file mode 100644 index 000000000..54af79cba --- /dev/null +++ b/tests/functional/test_severity.py @@ -0,0 +1,122 @@ +from dbt.tests.util import run_dbt +import pytest + + +models__sample_model_sql = """ +select * from {{ source("raw", "sample_seed") }} +""" + +models__schema_yml = """ +version: 2 +sources: + - name: raw + database: "{{ target.database }}" + schema: "{{ target.schema }}" + tables: + - name: sample_seed + columns: + - name: email + data_tests: + - not_null: + severity: "{{ 'error' if var('strict', false) else 'warn' }}" +models: + - name: sample_model + columns: + - name: email + data_tests: + - not_null: + severity: "{{ 'error' if var('strict', false) else 'warn' }}" +""" + +seeds__sample_seed_csv = """id,first_name,last_name,email,gender,ip_address,updated_at +1,Judith,Kennedy,jkennedy0@phpbb.com,Female,54.60.24.128,2015-12-24 12:19:28 +2,Arthur,Kelly,akelly1@eepurl.com,Male,62.56.24.215,2015-10-28 16:22:15 +3,Rachel,Moreno,rmoreno2@msu.edu,Female,31.222.249.23,2016-04-05 02:05:30 +4,Ralph,Turner,rturner3@hp.com,Male,157.83.76.114,2016-08-08 00:06:51 +5,Laura,Gonzales,lgonzales4@howstuffworks.com,Female,30.54.105.168,2016-09-01 08:25:38 +6,Katherine,Lopez,null,Female,169.138.46.89,2016-08-30 18:52:11 +7,Jeremy,Hamilton,jhamilton6@mozilla.org,Male,231.189.13.133,2016-07-17 02:09:46 +8,Heather,Rose,hrose7@goodreads.com,Female,87.165.201.65,2015-12-29 22:03:56 +9,Gregory,Kelly,gkelly8@trellian.com,Male,154.209.99.7,2016-03-24 21:18:16 +10,Rachel,Lopez,rlopez9@themeforest.net,Female,237.165.82.71,2016-08-20 15:44:49 +11,Donna,Welch,dwelcha@shutterfly.com,Female,103.33.110.138,2016-02-27 01:41:48 +12,Russell,Lawrence,rlawrenceb@qq.com,Male,189.115.73.4,2016-06-11 03:07:09 +13,Michelle,Montgomery,mmontgomeryc@scientificamerican.com,Female,243.220.95.82,2016-06-18 16:27:19 +14,Walter,Castillo,null,Male,71.159.238.196,2016-10-06 01:55:44 +15,Robin,Mills,rmillse@vkontakte.ru,Female,172.190.5.50,2016-10-31 11:41:21 +16,Raymond,Holmes,rholmesf@usgs.gov,Male,148.153.166.95,2016-10-03 08:16:38 +17,Gary,Bishop,gbishopg@plala.or.jp,Male,161.108.182.13,2016-08-29 19:35:20 +18,Anna,Riley,arileyh@nasa.gov,Female,253.31.108.22,2015-12-11 04:34:27 +19,Sarah,Knight,sknighti@foxnews.com,Female,222.220.3.177,2016-09-26 00:49:06 +20,Phyllis,Fox,pfoxj@creativecommons.org,Female,163.191.232.95,2016-08-21 10:35:19 +""" + + +tests__sample_test_sql = """ +{{ config(severity='error' if var('strict', false) else 'warn') }} +select * from {{ ref("sample_model") }} where email is null +""" + + +@pytest.fixture(scope="class") +def models(): + return {"sample_model.sql": models__sample_model_sql, "schema.yml": models__schema_yml} + + +@pytest.fixture(scope="class") +def seeds(): + return {"sample_seed.csv": seeds__sample_seed_csv} + + +@pytest.fixture(scope="class") +def tests(): + return {"null_email.sql": tests__sample_test_sql} + + +@pytest.fixture(scope="class") +def project_config_update(): + return { + "config-version": 2, + "seed-paths": ["seeds"], + "test-paths": ["tests"], + "seeds": { + "quote_columns": False, + }, + } + + +class TestSeverity: + @pytest.fixture(scope="class", autouse=True) + def seed_and_run(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + + def test_generic_default(self, project): + results = run_dbt(["test", "--select", "test_type:generic"]) + assert len(results) == 2 + assert all([r.status == "warn" for r in results]) + assert all([r.failures == 2 for r in results]) + + def test_generic_strict(self, project): + results = run_dbt( + ["test", "--select", "test_type:generic", "--vars", '{"strict": True}'], + expect_pass=False, + ) + assert len(results) == 2 + assert all([r.status == "fail" for r in results]) + assert all([r.failures == 2 for r in results]) + + def test_singular_default(self, project): + results = run_dbt(["test", "--select", "test_type:singular"]) + assert len(results) == 1 + assert all([r.status == "warn" for r in results]) + assert all([r.failures == 2 for r in results]) + + def test_singular_strict(self, project): + results = run_dbt( + ["test", "--select", "test_type:singular", "--vars", '{"strict": True}'], + expect_pass=False, + ) + assert len(results) == 1 + assert all([r.status == "fail" for r in results]) + assert all([r.failures == 2 for r in results]) diff --git a/tests/functional/test_store_test_failures.py b/tests/functional/test_store_test_failures.py new file mode 100644 index 000000000..a42b78448 --- /dev/null +++ b/tests/functional/test_store_test_failures.py @@ -0,0 +1,46 @@ +import pytest + +from dbt.tests.adapter.store_test_failures_tests.basic import ( + StoreTestFailuresAsExceptions, + StoreTestFailuresAsGeneric, + StoreTestFailuresAsInteractions, + StoreTestFailuresAsProjectLevelEphemeral, + StoreTestFailuresAsProjectLevelOff, + StoreTestFailuresAsProjectLevelView, +) + + +class PostgresMixin: + audit_schema: str + + @pytest.fixture(scope="function", autouse=True) + def setup_audit_schema(self, project, setup_method): + # postgres only supports schema names of 63 characters + # a schema with a longer name still gets created, but the name gets truncated + self.audit_schema = self.audit_schema[:63] + + +class TestStoreTestFailuresAsInteractions(StoreTestFailuresAsInteractions, PostgresMixin): + pass + + +class TestStoreTestFailuresAsProjectLevelOff(StoreTestFailuresAsProjectLevelOff, PostgresMixin): + pass + + +class TestStoreTestFailuresAsProjectLevelView(StoreTestFailuresAsProjectLevelView, PostgresMixin): + pass + + +class TestStoreTestFailuresAsProjectLevelEphemeral( + StoreTestFailuresAsProjectLevelEphemeral, PostgresMixin +): + pass + + +class TestStoreTestFailuresAsGeneric(StoreTestFailuresAsGeneric, PostgresMixin): + pass + + +class TestStoreTestFailuresAsExceptions(StoreTestFailuresAsExceptions, PostgresMixin): + pass diff --git a/tests/functional/test_thread_count.py b/tests/functional/test_thread_count.py new file mode 100644 index 000000000..7dd5ab498 --- /dev/null +++ b/tests/functional/test_thread_count.py @@ -0,0 +1,25 @@ +from dbt.tests.util import run_dbt +import pytest + + +class TestThreadCount: + @pytest.fixture(scope="class") + def models(self): + sql = "with x as (select pg_sleep(1)) select 1" + independent_models = { + f"do_nothing_{num}.sql": sql + for num in range(1, 21) + } + return independent_models + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"config-version": 2} + + @pytest.fixture(scope="class") + def profiles_config_update(self): + return {"threads": 2} + + def test_threading_8x(self, project): + results = run_dbt(args=["run", "--threads", "16"]) + assert len(results), 20 diff --git a/tests/functional/test_timezones.py b/tests/functional/test_timezones.py new file mode 100644 index 000000000..a898a27b4 --- /dev/null +++ b/tests/functional/test_timezones.py @@ -0,0 +1,64 @@ +import os + +from dbt.tests.util import run_dbt +import pytest + + +# Canada/Saskatchewan does not observe DST so the time diff won't change depending on when it is in the year +model_sql = """ +{{ config(materialized='table') }} + +select + '{{ run_started_at.astimezone(modules.pytz.timezone("Canada/Saskatchewan")) }}' as run_started_at_saskatchewan, + '{{ run_started_at }}' as run_started_at_utc +""" + + +class TestTimezones: + @pytest.fixture(scope="class") + def models(self): + return {"timezones.sql": model_sql} + + @pytest.fixture(scope="class") + def dbt_profile_data(self, unique_schema): + return { + "test": { + "outputs": { + "dev": { + "type": "postgres", + "threads": 1, + "host": "localhost", + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": os.getenv("POSTGRES_TEST_USER", "root"), + "pass": os.getenv("POSTGRES_TEST_PASS", "password"), + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "schema": unique_schema, + }, + }, + "target": "dev", + } + } + + @pytest.fixture(scope="class") + def query(self, project): + return """ + select + run_started_at_saskatchewan, + run_started_at_utc + from {schema}.timezones + """.format( + schema=project.test_schema + ) + + # This test used to use freeze_time, but that doesn't work + # with our timestamp fields in proto messages. + def test_run_started_at(self, project, query): + results = run_dbt(["run"]) + + assert len(results) == 1 + + result = project.run_sql(query, fetch="all")[0] + saskatchewan, utc = result + + assert "+00:00" in utc + assert "-06:00" in saskatchewan diff --git a/tests/functional/test_types.py b/tests/functional/test_types.py new file mode 100644 index 000000000..02faed90a --- /dev/null +++ b/tests/functional/test_types.py @@ -0,0 +1,34 @@ +from dbt.contracts.results import NodeStatus +from dbt.tests.util import run_dbt +import pytest + + +macros_sql = """ +{% macro test_array_results() %} + + {% set sql %} + select ARRAY[1, 2, 3, 4] as mydata + {% endset %} + + {% set result = run_query(sql) %} + {% set value = result.columns['mydata'][0] %} + + {# This will be json-stringified #} + {% if value != "[1, 2, 3, 4]" %} + {% do exceptions.raise_compiler_error("Value was " ~ value) %} + {% endif %} + +{% endmacro %} +""" + + +class TestTypes: + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": macros_sql, + } + + def test_nested_types(self, project): + result = run_dbt(["run-operation", "test_array_results"]) + assert result.results[0].status == NodeStatus.Success diff --git a/tests/functional/test_unlogged_table.py b/tests/functional/test_unlogged_table.py new file mode 100644 index 000000000..a56a8bfe6 --- /dev/null +++ b/tests/functional/test_unlogged_table.py @@ -0,0 +1,73 @@ +from dbt.tests.util import run_dbt +import pytest + + +schema_yml = """ +version: 2 +models: + - name: table_unlogged + description: "Unlogged table model" + columns: + - name: column_a + description: "Sample description" + quote: true +""" + +table_unlogged_sql = """ +{{ config(materialized = 'table', unlogged = True) }} + +select 1 as column_a +""" + + +class TestPostgresUnloggedTable: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": schema_yml, + "table_unlogged.sql": table_unlogged_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "models": { + "test": { + "materialized": "table", + "+persist_docs": { + "relation": True, + "columns": True, + }, + } + } + } + + def test_postgres_unlogged_table_catalog(self, project): + table_name = "table_unlogged" + + results = run_dbt(["run", "--models", table_name]) + assert len(results) == 1 + + result = self.get_table_persistence(project, table_name) + assert result == "u" + + catalog = run_dbt(["docs", "generate"]) + + assert len(catalog.nodes) == 1 + + table_node = catalog.nodes["model.test.table_unlogged"] + assert table_node + assert "column_a" in table_node.columns + + def get_table_persistence(self, project, table_name): + sql = """ + SELECT + relpersistence + FROM pg_class + WHERE relname = '{table_name}' + """ + sql = sql.format(table_name=table_name, schema=project.test_schema) + result = project.run_sql(sql, fetch="one") + assert len(result) == 1 + + return result[0] diff --git a/tests/functional/unit_testing/fixtures.py b/tests/functional/unit_testing/fixtures.py new file mode 100644 index 000000000..b4a147b63 --- /dev/null +++ b/tests/functional/unit_testing/fixtures.py @@ -0,0 +1,600 @@ +my_model_vars_sql = """ +SELECT +a+b as c, +concat(string_a, string_b) as string_c, +not_testing, date_a, +{{ dbt.string_literal(type_numeric()) }} as macro_call, +{{ dbt.string_literal(var('my_test')) }} as var_call, +{{ dbt.string_literal(env_var('MY_TEST', 'default')) }} as env_var_call, +{{ dbt.string_literal(invocation_id) }} as invocation_id +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_sql = """ +SELECT +a+b as c, +concat(string_a, string_b) as string_c, +not_testing, date_a +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_a_sql = """ +SELECT +1 as a, +1 as id, +2 as not_testing, +'a' as string_a, +DATE '2020-01-02' as date_a +""" + +my_model_b_sql = """ +SELECT +2 as b, +1 as id, +2 as c, +'b' as string_b +""" + +test_my_model_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 2} + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: [] + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, string_a: a} + - input: ref('my_model_b') + rows: + - {id: 1, string_b: b} + expect: + rows: + - {string_c: ab} + config: + tags: test_this +""" + + +test_my_model_simple_fixture_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 2} + + - name: test_depends_on_fixture + model: my_model + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + expect: + rows: [] + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_has_string_c_ab + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, string_a: a} + - input: ref('my_model_b') + rows: + - {id: 1, string_b: b} + expect: + rows: + - {string_c: ab} + config: + tags: test_this +""" + + +datetime_test = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +event_sql = """ +select DATE '2020-01-01' as event_time, 1 as event +union all +select DATE '2020-01-02' as event_time, 2 as event +union all +select DATE '2020-01-03' as event_time, 3 as event +""" + +datetime_test_invalid_format_key = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + format: xxxx + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +datetime_test_invalid_csv_values = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +datetime_test_invalid_csv_file_values = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +event_sql = """ +select DATE '2020-01-01' as event_time, 1 as event +union all +select DATE '2020-01-02' as event_time, 2 as event +union all +select DATE '2020-01-03' as event_time, 3 as event +""" + +my_incremental_model_sql = """ +{{ + config( + materialized='incremental' + ) +}} + +select * from {{ ref('events') }} +{% if is_incremental() %} +where event_time > (select max(event_time) from {{ this }}) +{% endif %} +""" + +test_my_model_incremental_yml = """ +unit_tests: + - name: incremental_false + model: my_incremental_model + overrides: + macros: + is_incremental: false + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + expect: + rows: + - {event_time: "2020-01-01", event: 1} + - name: incremental_true + model: my_incremental_model + overrides: + macros: + is_incremental: true + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + - {event_time: "2020-01-02", event: 2} + - {event_time: "2020-01-03", event: 3} + - input: this + rows: + - {event_time: "2020-01-01", event: 1} + expect: + rows: + - {event_time: "2020-01-02", event: 2} + - {event_time: "2020-01-03", event: 3} +""" + +# -- inline csv tests + +test_my_model_csv_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + rows: | + c + 2 + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + rows: [] + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,string_a + 1,a + - input: ref('my_model_b') + format: csv + rows: | + id,string_b + 1,b + expect: + format: csv + rows: | + string_c + ab + config: + tags: test_this +""" + +# -- csv file tests +test_my_model_file_csv_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_numeric_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + expect: + format: csv + fixture: test_my_model_basic_fixture + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_empty_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + expect: + format: csv + fixture: test_my_model_a_empty_fixture + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_numeric_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_b_fixture + expect: + format: csv + fixture: test_my_model_concat_fixture + config: + tags: test_this +""" + +test_my_model_fixture_csv = """id,b +1,2 +2,2 +""" + +test_my_model_a_fixture_csv = """id,string_a +1,a +""" + +test_my_model_a_empty_fixture_csv = """ +""" + +test_my_model_a_numeric_fixture_csv = """id,a +1,1 +""" + +test_my_model_b_fixture_csv = """id,string_b +1,b +""" + +test_my_model_basic_fixture_csv = """c +2 +""" + +test_my_model_concat_fixture_csv = """string_c +ab +""" + +# -- mixed inline and file csv +test_my_model_mixed_csv_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: test_my_model_basic_fixture + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_empty_fixture + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: test_my_model_a_empty_fixture + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_b_fixture + expect: + format: csv + rows: | + string_c + ab + config: + tags: test_this +""" + +# unit tests with errors + +# -- fixture file doesn't exist +test_my_model_missing_csv_yml = """ +unit_tests: + - name: test_missing_csv_file + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: fake_fixture +""" + +test_my_model_duplicate_csv_yml = """ +unit_tests: + - name: test_missing_csv_file + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: test_my_model_basic_fixture +""" diff --git a/tests/functional/unit_testing/test_csv_fixtures.py b/tests/functional/unit_testing/test_csv_fixtures.py new file mode 100644 index 000000000..4fe4c6ad4 --- /dev/null +++ b/tests/functional/unit_testing/test_csv_fixtures.py @@ -0,0 +1,251 @@ +from dbt.exceptions import ParsingError, YamlParseDictError, DuplicateResourceNameError +from dbt.tests.util import rm_file, run_dbt, write_file +import pytest + +from fixtures import ( + datetime_test, + datetime_test_invalid_csv_values, + datetime_test_invalid_format_key, + my_model_a_sql, + my_model_b_sql, + my_model_sql, + test_my_model_a_empty_fixture_csv, + test_my_model_a_fixture_csv, + test_my_model_a_numeric_fixture_csv, + test_my_model_b_fixture_csv, + test_my_model_basic_fixture_csv, + test_my_model_concat_fixture_csv, + test_my_model_csv_yml, + test_my_model_duplicate_csv_yml, + test_my_model_file_csv_yml, + test_my_model_fixture_csv, + test_my_model_missing_csv_yml, + test_my_model_mixed_csv_yml, +) + + +class TestUnitTestsWithInlineCSV: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_csv_yml + datetime_test, + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + # Check error with invalid format key + write_file( + test_my_model_csv_yml + datetime_test_invalid_format_key, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(YamlParseDictError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + # Check error with csv format defined but dict on rows + write_file( + test_my_model_csv_yml + datetime_test_invalid_csv_values, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + +class TestUnitTestsWithFileCSV: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_file_csv_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_fixture.csv": test_my_model_fixture_csv, + "test_my_model_a_fixture.csv": test_my_model_a_fixture_csv, + "test_my_model_b_fixture.csv": test_my_model_b_fixture_csv, + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + "test_my_model_a_numeric_fixture.csv": test_my_model_a_numeric_fixture_csv, + "test_my_model_a_empty_fixture.csv": test_my_model_a_empty_fixture_csv, + "test_my_model_concat_fixture.csv": test_my_model_concat_fixture_csv, + } + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + manifest = run_dbt(["parse"]) # Note: this manifest is deserialized from msgpack + fixture = manifest.fixtures["fixture.test.test_my_model_a_fixture"] + fixture_source_file = manifest.files[fixture.file_id] + assert fixture_source_file.fixture == "fixture.test.test_my_model_a_fixture" + assert fixture_source_file.unit_tests == [ + "unit_test.test.my_model.test_my_model_string_concat" + ] + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + # Check partial parsing remove fixture file + rm_file(project.project_root, "tests", "fixtures", "test_my_model_a_fixture.csv") + with pytest.raises( + ParsingError, + match="File not found for fixture 'test_my_model_a_fixture' in unit tests", + ): + run_dbt(["test", "--select", "my_model"], expect_pass=False) + # put back file and check that it works + write_file( + test_my_model_a_fixture_csv, + project.project_root, + "tests", + "fixtures", + "test_my_model_a_fixture.csv", + ) + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + # Now update file + write_file( + test_my_model_a_fixture_csv + "2,2", + project.project_root, + "tests", + "fixtures", + "test_my_model_a_fixture.csv", + ) + manifest = run_dbt(["parse"]) + fixture = manifest.fixtures["fixture.test.test_my_model_a_fixture"] + fixture_source_file = manifest.files[fixture.file_id] + assert "2,2" in fixture_source_file.contents + assert fixture.rows == [{"id": "1", "string_a": "a"}, {"id": "2", "string_a": "2"}] + + # Check error with invalid format key + write_file( + test_my_model_file_csv_yml + datetime_test_invalid_format_key, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(YamlParseDictError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + # Check error with csv format defined but dict on rows + write_file( + test_my_model_file_csv_yml + datetime_test_invalid_csv_values, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + +class TestUnitTestsWithMixedCSV: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_mixed_csv_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_fixture.csv": test_my_model_fixture_csv, + "test_my_model_a_fixture.csv": test_my_model_a_fixture_csv, + "test_my_model_b_fixture.csv": test_my_model_b_fixture_csv, + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + "test_my_model_a_numeric_fixture.csv": test_my_model_a_numeric_fixture_csv, + "test_my_model_a_empty_fixture.csv": test_my_model_a_empty_fixture_csv, + "test_my_model_concat_fixture.csv": test_my_model_concat_fixture_csv, + } + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + # Check error with invalid format key + write_file( + test_my_model_mixed_csv_yml + datetime_test_invalid_format_key, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(YamlParseDictError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + # Check error with csv format defined but dict on rows + write_file( + test_my_model_mixed_csv_yml + datetime_test_invalid_csv_values, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + +class TestUnitTestsMissingCSVFile: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_missing_csv_yml, + } + + def test_missing(self, project): + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestUnitTestsDuplicateCSVFile: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_duplicate_csv_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "one-folder": { + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + }, + "another-folder": { + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + }, + } + } + + def test_duplicate(self, project): + with pytest.raises(DuplicateResourceNameError): + run_dbt(["run"]) diff --git a/tests/functional/unit_testing/test_state.py b/tests/functional/unit_testing/test_state.py new file mode 100644 index 000000000..1a4ee90bb --- /dev/null +++ b/tests/functional/unit_testing/test_state.py @@ -0,0 +1,130 @@ +from copy import deepcopy +import os +import shutil + +from dbt.tests.util import run_dbt, write_config_file, write_file +import pytest + +from fixtures import ( + my_model_a_sql, + my_model_b_sql, + my_model_vars_sql, + test_my_model_b_fixture_csv as test_my_model_fixture_csv_modified, + test_my_model_fixture_csv, + test_my_model_simple_fixture_yml, +) + + +class UnitTestState: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_vars_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_simple_fixture_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_fixture.csv": test_my_model_fixture_csv, + } + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"vars": {"my_test": "my_test_var"}} + + def copy_state(self, project_root): + state_path = os.path.join(project_root, "state") + if not os.path.exists(state_path): + os.makedirs(state_path) + shutil.copyfile( + f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" + ) + shutil.copyfile( + f"{project_root}/target/run_results.json", f"{project_root}/state/run_results.json" + ) + + +class TestUnitTestStateModified(UnitTestState): + def test_state_modified(self, project): + run_dbt(["run"]) + run_dbt(["test"], expect_pass=False) + self.copy_state(project.project_root) + + # no changes + results = run_dbt(["test", "--select", "state:modified", "--state", "state"]) + assert len(results) == 0 + + # change underlying fixture file + write_file( + test_my_model_fixture_csv_modified, + project.project_root, + "tests", + "fixtures", + "test_my_model_fixture.csv", + ) + results = run_dbt( + ["test", "--select", "state:modified", "--state", "state"], expect_pass=True + ) + assert len(results) == 1 + assert results[0].node.name.endswith("test_depends_on_fixture") + # reset changes + self.copy_state(project.project_root) + + # change unit test definition of a single unit test + with_changes = test_my_model_simple_fixture_yml.replace("{string_c: ab}", "{string_c: bc}") + write_config_file(with_changes, project.project_root, "models", "test_my_model.yml") + results = run_dbt( + ["test", "--select", "state:modified", "--state", "state"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].node.name.endswith("test_has_string_c_ab") + + # change underlying model logic + write_config_file( + test_my_model_simple_fixture_yml, project.project_root, "models", "test_my_model.yml" + ) + write_file( + my_model_vars_sql.replace("a+b as c,", "a + b as c,"), + project.project_root, + "models", + "my_model.sql", + ) + results = run_dbt( + ["test", "--select", "state:modified", "--state", "state"], expect_pass=False + ) + assert len(results) == 4 + + +class TestUnitTestRetry(UnitTestState): + def test_unit_test_retry(self, project): + run_dbt(["run"]) + run_dbt(["test"], expect_pass=False) + self.copy_state(project.project_root) + + results = run_dbt(["retry"], expect_pass=False) + assert len(results) == 1 + + +class TestUnitTestDeferState(UnitTestState): + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + outputs["otherschema"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def test_unit_test_defer_state(self, project): + run_dbt(["run", "--target", "otherschema"]) + self.copy_state(project.project_root) + results = run_dbt(["test", "--defer", "--state", "state"], expect_pass=False) + assert len(results) == 4 + assert sorted([r.status for r in results]) == ["fail", "pass", "pass", "pass"] diff --git a/tests/functional/unit_testing/test_unit_testing.py b/tests/functional/unit_testing/test_unit_testing.py new file mode 100644 index 000000000..4983e01b3 --- /dev/null +++ b/tests/functional/unit_testing/test_unit_testing.py @@ -0,0 +1,236 @@ +from dbt.contracts.results import NodeStatus +from dbt.exceptions import DuplicateResourceNameError, ParsingError +from dbt.tests.util import get_manifest, run_dbt, write_file +import pytest + +from fixtures import ( + datetime_test, + event_sql, + my_incremental_model_sql, + my_model_a_sql, + my_model_b_sql, + my_model_vars_sql, + test_my_model_incremental_yml, + test_my_model_yml, +) + + +class TestUnitTests: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_vars_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"vars": {"my_test": "my_test_var"}} + + def test_basic(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + results = run_dbt(["build", "--select", "my_model"], expect_pass=False) + assert len(results) == 6 + for result in results: + if result.node.unique_id == "model.test.my_model": + result.status == NodeStatus.Skipped + + # Test select by test name + results = run_dbt(["test", "--select", "test_name:test_my_model_string_concat"]) + assert len(results) == 1 + + # Select, method not specified + results = run_dbt(["test", "--select", "test_my_model_overrides"]) + assert len(results) == 1 + + # Select using tag + results = run_dbt(["test", "--select", "tag:test_this"]) + assert len(results) == 1 + + # Partial parsing... remove test + write_file(test_my_model_yml, project.project_root, "models", "test_my_model.yml") + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 4 + + # Partial parsing... put back removed test + write_file( + test_my_model_yml + datetime_test, project.project_root, "models", "test_my_model.yml" + ) + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + manifest = get_manifest(project.project_root) + assert len(manifest.unit_tests) == 5 + # Every unit test has a depends_on to the model it tests + for unit_test_definition in manifest.unit_tests.values(): + assert unit_test_definition.depends_on.nodes[0] == "model.test.my_model" + + # Check for duplicate unit test name + # this doesn't currently pass with partial parsing because of the root problem + # described in https://github.com/dbt-labs/dbt-core/issues/8982 + write_file( + test_my_model_yml + datetime_test + datetime_test, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(DuplicateResourceNameError): + run_dbt(["run", "--no-partial-parse", "--select", "my_model"]) + + +class TestUnitTestIncrementalModel: + @pytest.fixture(scope="class") + def models(self): + return { + "my_incremental_model.sql": my_incremental_model_sql, + "events.sql": event_sql, + "test_my_incremental_model.yml": test_my_model_incremental_yml, + } + + def test_basic(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select by model name + results = run_dbt(["test", "--select", "my_incremental_model"], expect_pass=True) + assert len(results) == 2 + + +my_new_model = """ +select +my_favorite_seed.id, +a + b as c +from {{ ref('my_favorite_seed') }} as my_favorite_seed +inner join {{ ref('my_favorite_model') }} as my_favorite_model +on my_favorite_seed.id = my_favorite_model.id +""" + +my_favorite_model = """ +select +2 as id, +3 as b +""" + +seed_my_favorite_seed = """id,a +1,5 +2,4 +3,3 +4,2 +5,1 +""" + +schema_yml_explicit_seed = """ +unit_tests: + - name: t + model: my_new_model + given: + - input: ref('my_favorite_seed') + rows: + - {id: 1, a: 10} + - input: ref('my_favorite_model') + rows: + - {id: 1, b: 2} + expect: + rows: + - {id: 1, c: 12} +""" + +schema_yml_implicit_seed = """ +unit_tests: + - name: t + model: my_new_model + given: + - input: ref('my_favorite_seed') + - input: ref('my_favorite_model') + rows: + - {id: 1, b: 2} + expect: + rows: + - {id: 1, c: 7} +""" + +schema_yml_nonexistent_seed = """ +unit_tests: + - name: t + model: my_new_model + given: + - input: ref('my_second_favorite_seed') + - input: ref('my_favorite_model') + rows: + - {id: 1, b: 2} + expect: + rows: + - {id: 1, c: 7} +""" + + +class TestUnitTestExplicitSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_favorite_seed.csv": seed_my_favorite_seed} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_new_model.sql": my_new_model, + "my_favorite_model.sql": my_favorite_model, + "schema.yml": schema_yml_explicit_seed, + } + + def test_explicit_seed(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_new_model"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestImplicitSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_favorite_seed.csv": seed_my_favorite_seed} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_new_model.sql": my_new_model, + "my_favorite_model.sql": my_favorite_model, + "schema.yml": schema_yml_implicit_seed, + } + + def test_implicit_seed(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_new_model"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestNonexistentSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_favorite_seed.csv": seed_my_favorite_seed} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_new_model.sql": my_new_model, + "my_favorite_model.sql": my_favorite_model, + "schema.yml": schema_yml_nonexistent_seed, + } + + def test_nonexistent_seed(self, project): + with pytest.raises( + ParsingError, match="Unable to find seed 'test.my_second_favorite_seed' for unit tests" + ): + run_dbt(["test", "--select", "my_new_model"], expect_pass=False) diff --git a/tests/functional/unit_testing/test_ut_dependency.py b/tests/functional/unit_testing/test_ut_dependency.py new file mode 100644 index 000000000..fd120a712 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_dependency.py @@ -0,0 +1,115 @@ +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_unique_ids_in_results, run_dbt +import pytest + + +local_dependency__dbt_project_yml = """ + +name: 'local_dep' +version: '1.0' + +seeds: + quote_columns: False + +""" + +local_dependency__schema_yml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + data_tests: + - unique + +unit_tests: + - name: test_dep_model_id + model: dep_model + given: + - input: ref('seed') + rows: + - {id: 1, name: Joe} + expect: + rows: + - {name_id: Joe_1} + + +""" + +local_dependency__dep_model_sql = """ +select name || '_' || id as name_id from {{ ref('seed') }} + +""" + +local_dependency__seed_csv = """id,name +1,Mary +2,Sam +3,John +""" + +my_model_sql = """ +select * from {{ ref('dep_model') }} +""" + +my_model_schema_yml = """ +unit_tests: + - name: test_my_model_name_id + model: my_model + given: + - input: ref('dep_model') + rows: + - {name_id: Joe_1} + expect: + rows: + - {name_id: Joe_1} +""" + + +class TestUnitTestingInDependency: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": local_dependency__dbt_project_yml, + "models": { + "schema.yml": local_dependency__schema_yml, + "dep_model.sql": local_dependency__dep_model_sql, + }, + "seeds": {"seed.csv": local_dependency__seed_csv}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "schema.yml": my_model_schema_yml, + } + + def test_unit_test_in_dependency(self, project): + run_dbt(["deps"]) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 2 + + results = run_dbt(["test"]) + assert len(results) == 3 + unique_ids = get_unique_ids_in_results(results) + assert "unit_test.local_dep.dep_model.test_dep_model_id" in unique_ids + + results = run_dbt(["test", "--select", "test_type:unit"]) + # two unit tests, 1 in root package, one in local_dep package + assert len(results) == 2 + + results = run_dbt(["test", "--select", "local_dep"]) + # 2 tests in local_dep package + assert len(results) == 2 + + results = run_dbt(["test", "--select", "test"]) + # 1 test in root package + assert len(results) == 1 diff --git a/tests/functional/unit_testing/test_ut_sources.py b/tests/functional/unit_testing/test_ut_sources.py new file mode 100644 index 000000000..488b50e3c --- /dev/null +++ b/tests/functional/unit_testing/test_ut_sources.py @@ -0,0 +1,104 @@ +from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt, write_file +import pytest + + +raw_customers_csv = """id,first_name,last_name,email +1,Michael,Perez,mperez0@chronoengine.com +2,Shawn,Mccoy,smccoy1@reddit.com +3,Kathleen,Payne,kpayne2@cargocollective.com +4,Jimmy,Cooper,jcooper3@cargocollective.com +5,Katherine,Rice,krice4@typepad.com +6,Sarah,Ryan,sryan5@gnu.org +7,Martin,Mcdonald,mmcdonald6@opera.com +8,Frank,Robinson,frobinson7@wunderground.com +9,Jennifer,Franklin,jfranklin8@mail.ru +10,Henry,Welch,hwelch9@list-manage.com +""" + +schema_sources_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email +unit_tests: + - name: test_customers + model: customers + given: + - input: source('seed_sources', 'raw_customers') + rows: + - {id: 1, first_name: Emily} + expect: + rows: + - {id: 1, first_name: Emily} +""" + +customers_sql = """ +select * from {{ source('seed_sources', 'raw_customers') }} +""" + +failing_test_schema_yml = """ + - name: fail_test_customers + model: customers + given: + - input: source('seed_sources', 'raw_customers') + rows: + - {id: 1, first_name: Emily} + expect: + rows: + - {id: 1, first_name: Joan} +""" + + +class TestUnitTestSourceInput: + @pytest.fixture(scope="class") + def seeds(self): + return { + "raw_customers.csv": raw_customers_csv, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "customers.sql": customers_sql, + "sources.yml": schema_sources_yml, + } + + def test_source_input(self, project): + results = run_dbt(["seed"]) + results = run_dbt(["run"]) + len(results) == 1 + + results = run_dbt(["test", "--select", "test_type:unit"]) + assert len(results) == 1 + + results = run_dbt(["build"]) + assert len(results) == 5 + result_unique_ids = [result.node.unique_id for result in results] + assert len(result_unique_ids) == 5 + assert "unit_test.test.customers.test_customers" in result_unique_ids + + # write failing unit test + write_file( + schema_sources_yml + failing_test_schema_yml, + project.project_root, + "models", + "sources.yml", + ) + results = run_dbt(["build"], expect_pass=False) + for result in results: + if result.node.unique_id == "model.test.customers": + assert result.status == RunStatus.Skipped + elif result.node.unique_id == "model.test.customers": + assert result.status == TestStatus.Fail + assert len(results) == 6 diff --git a/tests/functional/utils.py b/tests/functional/utils.py new file mode 100644 index 000000000..ddfe36785 --- /dev/null +++ b/tests/functional/utils.py @@ -0,0 +1,14 @@ +import os +from contextlib import contextmanager +from typing import Optional +from pathlib import Path + + +@contextmanager +def up_one(return_path: Optional[Path] = None): + current_path = Path.cwd() + os.chdir("../") + try: + yield + finally: + os.chdir(return_path or current_path) From 5390f0d7dbdcb389be6415fe4f094fb34e9cbb75 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 23 Jan 2024 17:22:11 -0500 Subject: [PATCH 010/114] fix relative import issues --- pyproject.toml | 2 +- tests/functional/build_command/test_build.py | 2 +- tests/functional/configs/test_configs.py | 5 ++++- .../functional/configs/test_disabled_model.py | 2 +- .../context_methods/test_secret_env_vars.py | 2 +- .../context_methods/test_var_dependency.py | 2 +- .../custom_aliases/test_custom_aliases.py | 2 +- .../defer_state/test_defer_state.py | 2 +- .../defer_state/test_group_updates.py | 2 +- .../defer_state/test_modified_state.py | 2 +- .../defer_state/test_run_results_state.py | 2 +- .../deprecations/test_config_deprecations.py | 2 +- .../deprecations/test_deprecations.py | 2 +- .../functional/exit_codes/test_exit_codes.py | 2 +- .../exposures/test_exposure_configs.py | 2 +- tests/functional/exposures/test_exposures.py | 2 +- .../graph_selection/test_graph_selection.py | 2 +- tests/functional/projects/__init__.py | 6 ++--- tests/functional/test_config.py | 2 +- .../unit_testing/test_csv_fixtures.py | 2 +- tests/functional/unit_testing/test_state.py | 2 +- .../unit_testing/test_unit_testing.py | 2 +- tests/functional/utils.py | 22 +++++++++++++++---- 23 files changed, 45 insertions(+), 28 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0067b0913..68b2c526c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,7 +48,7 @@ test = [ "pytest-xdist", ] integration = [ - "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@migrate-dbt-tests-adapter#egg=dbt-tests-adapter&subdirectory=dbt-tests-adapter", + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#egg=dbt-tests-adapter&subdirectory=dbt-tests-adapter", ] build = [ "wheel", diff --git a/tests/functional/build_command/test_build.py b/tests/functional/build_command/test_build.py index 7ffa0a20f..a14b93ebc 100644 --- a/tests/functional/build_command/test_build.py +++ b/tests/functional/build_command/test_build.py @@ -1,7 +1,7 @@ from dbt.tests.util import run_dbt import pytest -import fixtures +from tests.functional.build_command import fixtures class TestBuildBase: diff --git a/tests/functional/configs/test_configs.py b/tests/functional/configs/test_configs.py index 18efccfc7..d99b32c48 100644 --- a/tests/functional/configs/test_configs.py +++ b/tests/functional/configs/test_configs.py @@ -10,7 +10,10 @@ from dbt_common.dataclass_schema import ValidationError import pytest -from fixtures import BaseConfigProject, simple_snapshot +from tests.functional.configs.fixtures import ( + BaseConfigProject, + simple_snapshot, +) class TestConfigs(BaseConfigProject): diff --git a/tests/functional/configs/test_disabled_model.py b/tests/functional/configs/test_disabled_model.py index d724cd956..8355d9bf9 100644 --- a/tests/functional/configs/test_disabled_model.py +++ b/tests/functional/configs/test_disabled_model.py @@ -4,7 +4,7 @@ from dbt_common.exceptions import CompilationError import pytest -import fixtures +from tests.functional.configs import fixtures # ensure double disabled doesn't throw error when set at schema level diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py index b4949d72a..8fe8cbf7b 100644 --- a/tests/functional/context_methods/test_secret_env_vars.py +++ b/tests/functional/context_methods/test_secret_env_vars.py @@ -6,7 +6,7 @@ from dbt_common.exceptions import DbtInternalError import pytest -from first_dependency import FirstDependencyProject +from tests.functional.context_methods.first_dependency import FirstDependencyProject secret_bad__context_sql = """ diff --git a/tests/functional/context_methods/test_var_dependency.py b/tests/functional/context_methods/test_var_dependency.py index e6c1a501c..a0c06db76 100644 --- a/tests/functional/context_methods/test_var_dependency.py +++ b/tests/functional/context_methods/test_var_dependency.py @@ -1,7 +1,7 @@ from dbt.tests.util import check_relations_equal, run_dbt import pytest -from first_dependency import ( +from tests.functional.context_methods.first_dependency import ( FirstDependencyConfigProject, FirstDependencyProject, ) diff --git a/tests/functional/custom_aliases/test_custom_aliases.py b/tests/functional/custom_aliases/test_custom_aliases.py index 9c59be773..1a55b8cc8 100644 --- a/tests/functional/custom_aliases/test_custom_aliases.py +++ b/tests/functional/custom_aliases/test_custom_aliases.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt -import fixtures +from tests.functional.custom_aliases import fixtures class TestAliases: diff --git a/tests/functional/defer_state/test_defer_state.py b/tests/functional/defer_state/test_defer_state.py index 814c2d729..45c1d93c8 100644 --- a/tests/functional/defer_state/test_defer_state.py +++ b/tests/functional/defer_state/test_defer_state.py @@ -8,7 +8,7 @@ from dbt.tests.util import rm_file, run_dbt, write_file import pytest -import fixtures +from tests.functional.defer_state import fixtures class BaseDeferState: diff --git a/tests/functional/defer_state/test_group_updates.py b/tests/functional/defer_state/test_group_updates.py index 78c636842..5f3e8006b 100644 --- a/tests/functional/defer_state/test_group_updates.py +++ b/tests/functional/defer_state/test_group_updates.py @@ -4,7 +4,7 @@ from dbt.tests.util import copy_file, run_dbt, write_file import pytest -import fixtures +from tests.functional.defer_state import fixtures class GroupSetup: diff --git a/tests/functional/defer_state/test_modified_state.py b/tests/functional/defer_state/test_modified_state.py index fc5de2f36..4b07919c5 100644 --- a/tests/functional/defer_state/test_modified_state.py +++ b/tests/functional/defer_state/test_modified_state.py @@ -14,7 +14,7 @@ from dbt_common.exceptions import CompilationError import pytest -import fixtures +from tests.functional.defer_state import fixtures class BaseModifiedState: diff --git a/tests/functional/defer_state/test_run_results_state.py b/tests/functional/defer_state/test_run_results_state.py index 795cdb974..ae5941c7c 100644 --- a/tests/functional/defer_state/test_run_results_state.py +++ b/tests/functional/defer_state/test_run_results_state.py @@ -4,7 +4,7 @@ from dbt.tests.util import run_dbt, write_file import pytest -import fixtures +from tests.functional.defer_state import fixtures class BaseRunResultsState: diff --git a/tests/functional/deprecations/test_config_deprecations.py b/tests/functional/deprecations/test_config_deprecations.py index 218c795f3..f8623c1ae 100644 --- a/tests/functional/deprecations/test_config_deprecations.py +++ b/tests/functional/deprecations/test_config_deprecations.py @@ -5,7 +5,7 @@ from dbt_common.exceptions import CompilationError import pytest -import fixtures +from tests.functional.deprecations import fixtures # test deprecation messages diff --git a/tests/functional/deprecations/test_deprecations.py b/tests/functional/deprecations/test_deprecations.py index 185157a0c..1f4a31c28 100644 --- a/tests/functional/deprecations/test_deprecations.py +++ b/tests/functional/deprecations/test_deprecations.py @@ -4,7 +4,7 @@ import pytest import yaml -import fixtures +from tests.functional.deprecations import fixtures class TestConfigPathDeprecation: diff --git a/tests/functional/exit_codes/test_exit_codes.py b/tests/functional/exit_codes/test_exit_codes.py index d237a6700..5b25f2d30 100644 --- a/tests/functional/exit_codes/test_exit_codes.py +++ b/tests/functional/exit_codes/test_exit_codes.py @@ -6,7 +6,7 @@ ) import pytest -import fixtures +from tests.functional.exit_codes import fixtures class BaseConfigProject: diff --git a/tests/functional/exposures/test_exposure_configs.py b/tests/functional/exposures/test_exposure_configs.py index d1585e575..f405eb75a 100644 --- a/tests/functional/exposures/test_exposure_configs.py +++ b/tests/functional/exposures/test_exposure_configs.py @@ -3,7 +3,7 @@ from dbt_common.dataclass_schema import ValidationError import pytest -import fixtures +from tests.functional.exposures import fixtures class ExposureConfigTests: diff --git a/tests/functional/exposures/test_exposures.py b/tests/functional/exposures/test_exposures.py index 9d0dbd43f..2108b93df 100644 --- a/tests/functional/exposures/test_exposures.py +++ b/tests/functional/exposures/test_exposures.py @@ -1,7 +1,7 @@ from dbt.tests.util import get_manifest, run_dbt import pytest -import fixtures +from tests.functional.exposures import fixtures class TestBasicExposures: diff --git a/tests/functional/graph_selection/test_graph_selection.py b/tests/functional/graph_selection/test_graph_selection.py index 28d5ff4e0..2314a9240 100644 --- a/tests/functional/graph_selection/test_graph_selection.py +++ b/tests/functional/graph_selection/test_graph_selection.py @@ -288,7 +288,7 @@ def test_exposure_parents(self, project): ) -class TestListPathGraphSelection(SelectionFixtures): +class TestListPathGraphSelection(GraphSelection): def test_list_select_with_project_dir(self, project): # Check that list command works os.chdir( diff --git a/tests/functional/projects/__init__.py b/tests/functional/projects/__init__.py index 3e12bd480..2ac8bfcb6 100644 --- a/tests/functional/projects/__init__.py +++ b/tests/functional/projects/__init__.py @@ -1,3 +1,3 @@ -from dbt_integration import dbt_integration -from graph_selection import GraphSelection -from jaffle_shop import JaffleShop +from tests.functional.projects.dbt_integration import dbt_integration +from tests.functional.projects.graph_selection import GraphSelection +from tests.functional.projects.jaffle_shop import JaffleShop diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index 100cdbb66..212fafba1 100644 --- a/tests/functional/test_config.py +++ b/tests/functional/test_config.py @@ -11,7 +11,7 @@ import yaml from dbt.adapters.postgres import PostgresCredentials -from .utils import normalize +from tests.functional.utils import normalize INITIAL_ROOT = os.getcwd() diff --git a/tests/functional/unit_testing/test_csv_fixtures.py b/tests/functional/unit_testing/test_csv_fixtures.py index 4fe4c6ad4..80dfcb154 100644 --- a/tests/functional/unit_testing/test_csv_fixtures.py +++ b/tests/functional/unit_testing/test_csv_fixtures.py @@ -2,7 +2,7 @@ from dbt.tests.util import rm_file, run_dbt, write_file import pytest -from fixtures import ( +from tests.functional.unit_testing.fixtures import ( datetime_test, datetime_test_invalid_csv_values, datetime_test_invalid_format_key, diff --git a/tests/functional/unit_testing/test_state.py b/tests/functional/unit_testing/test_state.py index 1a4ee90bb..f8d8b01fb 100644 --- a/tests/functional/unit_testing/test_state.py +++ b/tests/functional/unit_testing/test_state.py @@ -5,7 +5,7 @@ from dbt.tests.util import run_dbt, write_config_file, write_file import pytest -from fixtures import ( +from tests.functional.unit_testing.fixtures import ( my_model_a_sql, my_model_b_sql, my_model_vars_sql, diff --git a/tests/functional/unit_testing/test_unit_testing.py b/tests/functional/unit_testing/test_unit_testing.py index 4983e01b3..9920098ee 100644 --- a/tests/functional/unit_testing/test_unit_testing.py +++ b/tests/functional/unit_testing/test_unit_testing.py @@ -3,7 +3,7 @@ from dbt.tests.util import get_manifest, run_dbt, write_file import pytest -from fixtures import ( +from tests.functional.unit_testing.fixtures import ( datetime_test, event_sql, my_incremental_model_sql, diff --git a/tests/functional/utils.py b/tests/functional/utils.py index ddfe36785..a0c8cd85f 100644 --- a/tests/functional/utils.py +++ b/tests/functional/utils.py @@ -1,14 +1,28 @@ -import os from contextlib import contextmanager -from typing import Optional +from os import chdir +from os.path import normcase, normpath from pathlib import Path +from typing import Optional @contextmanager def up_one(return_path: Optional[Path] = None): current_path = Path.cwd() - os.chdir("../") + chdir("../") try: yield finally: - os.chdir(return_path or current_path) + chdir(return_path or current_path) + + +def normalize(path): + """On windows, neither is enough on its own: + + >>> normcase('C:\\documents/ALL CAPS/subdir\\..') + 'c:\\documents\\all caps\\subdir\\..' + >>> normpath('C:\\documents/ALL CAPS/subdir\\..') + 'C:\\documents\\ALL CAPS' + >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) + 'c:\\documents\\all caps' + """ + return normcase(normpath(path)) From a661a1568cc1c954735dfed0fe39903fa8fde8a2 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 23 Jan 2024 17:46:51 -0500 Subject: [PATCH 011/114] resolve pytest file name collision --- .../{test_configs.py => test_saved_query_configs.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/functional/saved_queries/{test_configs.py => test_saved_query_configs.py} (100%) diff --git a/tests/functional/saved_queries/test_configs.py b/tests/functional/saved_queries/test_saved_query_configs.py similarity index 100% rename from tests/functional/saved_queries/test_configs.py rename to tests/functional/saved_queries/test_saved_query_configs.py From 3e977657e8384b78d47b7d7cddf355d80da69ab9 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 23 Jan 2024 18:09:36 -0500 Subject: [PATCH 012/114] execute run_dbt through proxy method to set flags --- .../artifacts/test_artifact_fields.py | 4 ++- tests/functional/artifacts/test_artifacts.py | 3 +-- .../artifacts/test_docs_generate_defer.py | 3 ++- tests/functional/artifacts/test_override.py | 3 ++- .../artifacts/test_previous_version_state.py | 4 ++- .../functional/artifacts/test_run_results.py | 2 +- tests/functional/basic/test_basic.py | 4 ++- tests/functional/basic/test_jaffle_shop.py | 3 ++- tests/functional/basic/test_mixed_case_db.py | 4 ++- tests/functional/basic/test_project.py | 4 ++- .../functional/basic/test_simple_reference.py | 4 ++- .../functional/basic/test_varchar_widening.py | 4 ++- tests/functional/build_command/test_build.py | 2 +- tests/functional/cli/test_error_handling.py | 3 ++- tests/functional/cli/test_multioption.py | 3 ++- tests/functional/compile/test_compile.py | 3 ++- .../configs/test_contract_configs.py | 10 +++----- .../context_methods/test_builtin_functions.py | 4 ++- .../context_methods/test_custom_env_vars.py | 3 ++- .../context_methods/test_env_vars.py | 4 ++- .../context_methods/test_secret_env_vars.py | 3 ++- .../contracts/test_contract_precision.py | 3 ++- .../contracts/test_nonstandard_data_type.py | 3 ++- .../defer_state/test_modified_state.py | 9 ++----- .../dependencies/test_local_dependency.py | 8 ++++-- .../partial_parsing/test_partial_parsing.py | 8 +++--- .../partial_parsing/test_pp_vars.py | 8 ++---- tests/functional/postgres/test_indexes.py | 2 +- tests/functional/profiles/test_profile_dir.py | 8 ++---- .../run_operations/test_run_operations.py | 3 +-- tests/functional/show/test_show.py | 2 +- tests/functional/test_access.py | 4 ++- tests/functional/test_analyses.py | 4 ++- tests/functional/test_clean.py | 3 ++- tests/functional/test_colors.py | 3 ++- tests/functional/test_experimental_parser.py | 3 ++- tests/functional/utils.py | 25 ++++++++++++++++++- 37 files changed, 109 insertions(+), 64 deletions(-) diff --git a/tests/functional/artifacts/test_artifact_fields.py b/tests/functional/artifacts/test_artifact_fields.py index 3a2f4a107..cbc679358 100644 --- a/tests/functional/artifacts/test_artifact_fields.py +++ b/tests/functional/artifacts/test_artifact_fields.py @@ -1,6 +1,8 @@ -from dbt.tests.util import get_artifact, get_manifest, run_dbt +from dbt.tests.util import get_artifact, get_manifest import pytest +from tests.functional.utils import run_dbt + # This is a place to put specific tests for contents of artifacts that we # don't want to bother putting in the big artifact output test, which is diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py index 2fa6be47c..3c182a716 100644 --- a/tests/functional/artifacts/test_artifacts.py +++ b/tests/functional/artifacts/test_artifacts.py @@ -9,8 +9,6 @@ from dbt.tests.util import ( check_datetime_between, get_artifact, - run_dbt, - run_dbt_and_capture, ) import pytest @@ -24,6 +22,7 @@ expected_run_results, expected_versions_run_results, ) +from tests.functional.utils import run_dbt, run_dbt_and_capture models__schema_yml = """ diff --git a/tests/functional/artifacts/test_docs_generate_defer.py b/tests/functional/artifacts/test_docs_generate_defer.py index 0222aa3dc..1357f3847 100644 --- a/tests/functional/artifacts/test_docs_generate_defer.py +++ b/tests/functional/artifacts/test_docs_generate_defer.py @@ -1,9 +1,10 @@ import os import shutil -from dbt.tests.util import run_dbt import pytest +from tests.functional.utils import run_dbt + model_sql = """ select 1 as id diff --git a/tests/functional/artifacts/test_override.py b/tests/functional/artifacts/test_override.py index b97f38198..4352aa1c6 100644 --- a/tests/functional/artifacts/test_override.py +++ b/tests/functional/artifacts/test_override.py @@ -1,7 +1,8 @@ -from dbt.tests.util import run_dbt from dbt_common.exceptions import CompilationError import pytest +from tests.functional.utils import run_dbt + model_sql = """ select 1 as id diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py index 0a8f1ebf5..abb041032 100644 --- a/tests/functional/artifacts/test_previous_version_state.py +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -6,9 +6,11 @@ from dbt.artifacts.run import RunResultsArtifact from dbt.contracts.graph.manifest import WritableManifest from dbt.exceptions import IncompatibleSchemaError -from dbt.tests.util import get_manifest, run_dbt +from dbt.tests.util import get_manifest import pytest +from tests.functional.utils import run_dbt + # This project must have one of each kind of node type, plus disabled versions, for # test coverage to be complete. diff --git a/tests/functional/artifacts/test_run_results.py b/tests/functional/artifacts/test_run_results.py index 10934ea98..d8c30a2b7 100644 --- a/tests/functional/artifacts/test_run_results.py +++ b/tests/functional/artifacts/test_run_results.py @@ -2,9 +2,9 @@ from multiprocessing import Process from pathlib import Path -from dbt.tests.util import run_dbt import pytest +from tests.functional.utils import run_dbt good_model_sql = """ select 1 as id diff --git a/tests/functional/basic/test_basic.py b/tests/functional/basic/test_basic.py index 52ce567b0..478023e61 100644 --- a/tests/functional/basic/test_basic.py +++ b/tests/functional/basic/test_basic.py @@ -1,6 +1,8 @@ -from dbt.tests.util import get_manifest, run_dbt +from dbt.tests.util import get_manifest import pytest +from tests.functional.utils import run_dbt + my_model_sql = """ select 1 as fun diff --git a/tests/functional/basic/test_jaffle_shop.py b/tests/functional/basic/test_jaffle_shop.py index 3f4ab0b23..86ccdacc7 100644 --- a/tests/functional/basic/test_jaffle_shop.py +++ b/tests/functional/basic/test_jaffle_shop.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt, get_manifest, run_dbt_and_capture, write_file +from dbt.tests.util import get_manifest, write_file from tests.functional.projects import JaffleShop +from tests.functional.utils import run_dbt, run_dbt_and_capture class TestBasic(JaffleShop): diff --git a/tests/functional/basic/test_mixed_case_db.py b/tests/functional/basic/test_mixed_case_db.py index e41fd39f9..c6f318ce7 100644 --- a/tests/functional/basic/test_mixed_case_db.py +++ b/tests/functional/basic/test_mixed_case_db.py @@ -1,6 +1,8 @@ -from dbt.tests.util import get_manifest, run_dbt +from dbt.tests.util import get_manifest import pytest +from tests.functional.utils import run_dbt + model_sql = """ select 1 as id diff --git a/tests/functional/basic/test_project.py b/tests/functional/basic/test_project.py index 562c3db26..9c8034cfc 100644 --- a/tests/functional/basic/test_project.py +++ b/tests/functional/basic/test_project.py @@ -1,11 +1,13 @@ import os from pathlib import Path -from dbt.tests.util import run_dbt, update_config_file, write_config_file +from dbt.tests.util import update_config_file, write_config_file from dbt.exceptions import ProjectContractError import pytest import yaml +from tests.functional.utils import run_dbt + simple_model_sql = """ select true as my_column diff --git a/tests/functional/basic/test_simple_reference.py b/tests/functional/basic/test_simple_reference.py index 5de7f8df9..f67ac5e88 100644 --- a/tests/functional/basic/test_simple_reference.py +++ b/tests/functional/basic/test_simple_reference.py @@ -1,6 +1,8 @@ -from dbt.tests.util import check_relations_equal, copy_file, read_file, run_dbt +from dbt.tests.util import check_relations_equal, copy_file, read_file import pytest +from tests.functional.utils import run_dbt + ephemeral_copy_sql = """ {{ diff --git a/tests/functional/basic/test_varchar_widening.py b/tests/functional/basic/test_varchar_widening.py index 11d424eb4..51b5f732f 100644 --- a/tests/functional/basic/test_varchar_widening.py +++ b/tests/functional/basic/test_varchar_widening.py @@ -1,8 +1,10 @@ import os -from dbt.tests.util import check_relations_equal, run_dbt +from dbt.tests.util import check_relations_equal import pytest +from tests.functional.utils import run_dbt + incremental_sql = """ {{ diff --git a/tests/functional/build_command/test_build.py b/tests/functional/build_command/test_build.py index a14b93ebc..2bf65274d 100644 --- a/tests/functional/build_command/test_build.py +++ b/tests/functional/build_command/test_build.py @@ -1,7 +1,7 @@ -from dbt.tests.util import run_dbt import pytest from tests.functional.build_command import fixtures +from tests.functional.utils import run_dbt class TestBuildBase: diff --git a/tests/functional/cli/test_error_handling.py b/tests/functional/cli/test_error_handling.py index 0fa6c2950..c70d789cf 100644 --- a/tests/functional/cli/test_error_handling.py +++ b/tests/functional/cli/test_error_handling.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt import pytest +from tests.functional.utils import run_dbt + model_one_sql = """ someting bad diff --git a/tests/functional/cli/test_multioption.py b/tests/functional/cli/test_multioption.py index 0de80a4f2..6246733fc 100644 --- a/tests/functional/cli/test_multioption.py +++ b/tests/functional/cli/test_multioption.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt import pytest +from tests.functional.utils import run_dbt + model_one_sql = """ select 1 as fun diff --git a/tests/functional/compile/test_compile.py b/tests/functional/compile/test_compile.py index 4e6b4fe1d..875193a3d 100644 --- a/tests/functional/compile/test_compile.py +++ b/tests/functional/compile/test_compile.py @@ -2,12 +2,13 @@ import pathlib import re -from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt.tests.util import read_file from dbt_common.exceptions import DbtBaseException, DbtRuntimeError import pytest from tests.functional.compile import fixtures from tests.functional.dbt_runner import dbtTestRunner +from tests.functional.utils import run_dbt, run_dbt_and_capture def norm_whitespace(string): diff --git a/tests/functional/configs/test_contract_configs.py b/tests/functional/configs/test_contract_configs.py index a7f4e35c8..fae946160 100644 --- a/tests/functional/configs/test_contract_configs.py +++ b/tests/functional/configs/test_contract_configs.py @@ -1,16 +1,12 @@ import os from dbt.exceptions import ParsingError -from dbt.tests.util import ( - get_artifact, - get_manifest, - run_dbt, - run_dbt_and_capture, - write_file, -) +from dbt.tests.util import get_artifact, get_manifest, write_file from dbt_common.exceptions import ValidationError import pytest +from tests.functional.utils import run_dbt, run_dbt_and_capture + my_model_sql = """ {{ diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py index 1bec64c9e..49192f3a5 100644 --- a/tests/functional/context_methods/test_builtin_functions.py +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -1,10 +1,12 @@ import json import os -from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file +from dbt.tests.util import write_file from dbt_common.exceptions import CompilationError import pytest +from tests.functional.utils import run_dbt, run_dbt_and_capture + macros__validate_set_sql = """ {% macro validate_set() %} diff --git a/tests/functional/context_methods/test_custom_env_vars.py b/tests/functional/context_methods/test_custom_env_vars.py index 93de2b664..50a9b00c7 100644 --- a/tests/functional/context_methods/test_custom_env_vars.py +++ b/tests/functional/context_methods/test_custom_env_vars.py @@ -1,9 +1,10 @@ import json import os -from dbt.tests.util import run_dbt_and_capture import pytest +from tests.functional.utils import run_dbt_and_capture + def parse_json_logs(json_log_output): parsed_logs = [] diff --git a/tests/functional/context_methods/test_env_vars.py b/tests/functional/context_methods/test_env_vars.py index e852199d8..0bfbd01c4 100644 --- a/tests/functional/context_methods/test_env_vars.py +++ b/tests/functional/context_methods/test_env_vars.py @@ -1,9 +1,11 @@ import os from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_ENV_PREFIX -from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture +from dbt.tests.util import get_manifest import pytest +from tests.functional.utils import run_dbt, run_dbt_and_capture + context_sql = """ diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py index 8fe8cbf7b..a6a5537a7 100644 --- a/tests/functional/context_methods/test_secret_env_vars.py +++ b/tests/functional/context_methods/test_secret_env_vars.py @@ -2,11 +2,12 @@ from dbt.constants import SECRET_ENV_PREFIX from dbt.exceptions import ParsingError -from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt.tests.util import read_file from dbt_common.exceptions import DbtInternalError import pytest from tests.functional.context_methods.first_dependency import FirstDependencyProject +from tests.functional.utils import run_dbt, run_dbt_and_capture secret_bad__context_sql = """ diff --git a/tests/functional/contracts/test_contract_precision.py b/tests/functional/contracts/test_contract_precision.py index d1c71ba18..a59983b3c 100644 --- a/tests/functional/contracts/test_contract_precision.py +++ b/tests/functional/contracts/test_contract_precision.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt_and_capture import pytest +from tests.functional.utils import run_dbt_and_capture + my_numeric_model_sql = """ select diff --git a/tests/functional/contracts/test_nonstandard_data_type.py b/tests/functional/contracts/test_nonstandard_data_type.py index 4233747fb..ee48bb3cd 100644 --- a/tests/functional/contracts/test_nonstandard_data_type.py +++ b/tests/functional/contracts/test_nonstandard_data_type.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt, run_dbt_and_capture import pytest +from tests.functional.utils import run_dbt, run_dbt_and_capture + my_numeric_model_sql = """ select diff --git a/tests/functional/defer_state/test_modified_state.py b/tests/functional/defer_state/test_modified_state.py index 4b07919c5..e108fe9f4 100644 --- a/tests/functional/defer_state/test_modified_state.py +++ b/tests/functional/defer_state/test_modified_state.py @@ -4,17 +4,12 @@ import string from dbt.exceptions import ContractBreakingChangeError -from dbt.tests.util import ( - get_manifest, - run_dbt, - run_dbt_and_capture, - update_config_file, - write_file, -) +from dbt.tests.util import get_manifest, update_config_file, write_file from dbt_common.exceptions import CompilationError import pytest from tests.functional.defer_state import fixtures +from tests.functional.utils import run_dbt, run_dbt_and_capture class BaseModifiedState: diff --git a/tests/functional/dependencies/test_local_dependency.py b/tests/functional/dependencies/test_local_dependency.py index 6dcd7b4fa..a4a42d1b7 100644 --- a/tests/functional/dependencies/test_local_dependency.py +++ b/tests/functional/dependencies/test_local_dependency.py @@ -5,13 +5,17 @@ from unittest import mock from dbt.exceptions import DbtProjectError, DependencyError -from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture +from dbt.tests.util import check_relations_equal from dbt_common.exceptions import CompilationError, DbtRuntimeError import dbt_common.semver as semver import pytest import yaml -from tests.functional.utils import up_one +from tests.functional.utils import ( + run_dbt, + run_dbt_and_capture, + up_one, +) models__dep_source = """ diff --git a/tests/functional/partial_parsing/test_partial_parsing.py b/tests/functional/partial_parsing/test_partial_parsing.py index 15b89a4c0..ee7851042 100644 --- a/tests/functional/partial_parsing/test_partial_parsing.py +++ b/tests/functional/partial_parsing/test_partial_parsing.py @@ -7,8 +7,6 @@ get_manifest, rename_dir, rm_file, - run_dbt, - run_dbt_and_capture, write_file, ) from dbt.contracts.files import ParseFileType @@ -75,7 +73,11 @@ test_macro_sql, test_macro2_sql, ) -from tests.functional.utils import up_one +from tests.functional.utils import ( + run_dbt, + run_dbt_and_capture, + up_one, +) os.environ["DBT_PP_TEST"] = "true" diff --git a/tests/functional/partial_parsing/test_pp_vars.py b/tests/functional/partial_parsing/test_pp_vars.py index cb10a7cd2..2976c1990 100644 --- a/tests/functional/partial_parsing/test_pp_vars.py +++ b/tests/functional/partial_parsing/test_pp_vars.py @@ -4,12 +4,7 @@ from dbt.adapters.exceptions import FailedToConnectError from dbt.constants import SECRET_ENV_PREFIX from dbt.exceptions import ParsingError -from dbt.tests.util import ( - get_manifest, - run_dbt, - run_dbt_and_capture, - write_file, -) +from dbt.tests.util import get_manifest, write_file import pytest from tests.functional.partial_parsing.fixtures import ( @@ -31,6 +26,7 @@ raw_customers_csv, test_color_sql, ) +from tests.functional.utils import run_dbt, run_dbt_and_capture os.environ["DBT_PP_TEST"] = "true" diff --git a/tests/functional/postgres/test_indexes.py b/tests/functional/postgres/test_indexes.py index cf706fb83..269917c93 100644 --- a/tests/functional/postgres/test_indexes.py +++ b/tests/functional/postgres/test_indexes.py @@ -1,6 +1,5 @@ import re -from dbt.tests.util import run_dbt, run_dbt_and_capture import pytest from tests.functional.postgres.fixtures import ( @@ -13,6 +12,7 @@ seeds__seed_csv, snapshots__colors_sql, ) +from tests.functional.utils import run_dbt, run_dbt_and_capture INDEX_DEFINITION_PATTERN = re.compile(r"using\s+(\w+)\s+\((.+)\)\Z") diff --git a/tests/functional/profiles/test_profile_dir.py b/tests/functional/profiles/test_profile_dir.py index fbb39ed9c..282c978c9 100644 --- a/tests/functional/profiles/test_profile_dir.py +++ b/tests/functional/profiles/test_profile_dir.py @@ -2,15 +2,11 @@ import os from pathlib import Path +from dbt.tests.util import rm_file, write_file import pytest import yaml -from dbt.tests.util import ( - rm_file, - run_dbt, - run_dbt_and_capture, - write_file, -) +from tests.functional.utils import run_dbt, run_dbt_and_capture @pytest.fixture(scope="class") diff --git a/tests/functional/run_operations/test_run_operations.py b/tests/functional/run_operations/test_run_operations.py index ea077dcf2..f5d019ff4 100644 --- a/tests/functional/run_operations/test_run_operations.py +++ b/tests/functional/run_operations/test_run_operations.py @@ -5,8 +5,6 @@ mkdir, rm_dir, rm_file, - run_dbt, - run_dbt_and_capture, write_file, ) from dbt_common.exceptions import DbtInternalError @@ -18,6 +16,7 @@ model_sql, sad_macros_sql, ) +from tests.functional.utils import run_dbt, run_dbt_and_capture class TestOperations: diff --git a/tests/functional/show/test_show.py b/tests/functional/show/test_show.py index 5eb711270..68aacf5df 100644 --- a/tests/functional/show/test_show.py +++ b/tests/functional/show/test_show.py @@ -1,4 +1,3 @@ -from dbt.tests.util import run_dbt, run_dbt_and_capture from dbt_common.exceptions import DbtBaseException, DbtRuntimeError import pytest @@ -13,6 +12,7 @@ schema_yml, seeds__sample_seed, ) +from tests.functional.utils import run_dbt, run_dbt_and_capture class ShowBase: diff --git a/tests/functional/test_access.py b/tests/functional/test_access.py index c53ca28ef..2db09a1f8 100644 --- a/tests/functional/test_access.py +++ b/tests/functional/test_access.py @@ -1,9 +1,11 @@ from dbt.exceptions import DbtReferenceError, InvalidAccessTypeError from dbt.node_types import AccessType from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file +from dbt.tests.util import get_manifest, rm_file, write_file import pytest +from tests.functional.utils import run_dbt + my_model_sql = "select 1 as fun" diff --git a/tests/functional/test_analyses.py b/tests/functional/test_analyses.py index aa71d1c69..f3b7d5517 100644 --- a/tests/functional/test_analyses.py +++ b/tests/functional/test_analyses.py @@ -1,8 +1,10 @@ import os -from dbt.tests.util import get_manifest, run_dbt +from dbt.tests.util import get_manifest import pytest +from tests.functional.utils import run_dbt + my_model_sql = """ select 1 as id diff --git a/tests/functional/test_clean.py b/tests/functional/test_clean.py index 51cdbcbd2..f76ecea4e 100644 --- a/tests/functional/test_clean.py +++ b/tests/functional/test_clean.py @@ -1,7 +1,8 @@ -from dbt.tests.util import run_dbt from dbt_common.exceptions import DbtRuntimeError import pytest +from tests.functional.utils import run_dbt + class TestCleanSourcePath: @pytest.fixture(scope="class") diff --git a/tests/functional/test_colors.py b/tests/functional/test_colors.py index 3d6451462..311ae93ac 100644 --- a/tests/functional/test_colors.py +++ b/tests/functional/test_colors.py @@ -1,8 +1,9 @@ import re -from dbt.tests.util import run_dbt_and_capture import pytest +from tests.functional.utils import run_dbt_and_capture + models__do_nothing_then_fail_sql = """ select 1, diff --git a/tests/functional/test_experimental_parser.py b/tests/functional/test_experimental_parser.py index b30119147..18ee85257 100644 --- a/tests/functional/test_experimental_parser.py +++ b/tests/functional/test_experimental_parser.py @@ -2,9 +2,10 @@ from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import RefArgs -from dbt.tests.util import run_dbt, run_dbt_and_capture import pytest +from tests.functional.utils import run_dbt, run_dbt_and_capture + def get_manifest(): path = "./target/partial_parse.msgpack" diff --git a/tests/functional/utils.py b/tests/functional/utils.py index a0c8cd85f..af6427a3c 100644 --- a/tests/functional/utils.py +++ b/tests/functional/utils.py @@ -2,7 +2,12 @@ from os import chdir from os.path import normcase, normpath from pathlib import Path -from typing import Optional +from typing import List, Optional + +from dbt.tests.util import ( + run_dbt as _run_dbt, + run_dbt_and_capture as _run_dbt_and_capture, +) @contextmanager @@ -26,3 +31,21 @@ def normalize(path): 'c:\\documents\\all caps' """ return normcase(normpath(path)) + + +def run_dbt(args: Optional[List[str]] = None, expect_pass: bool = True): + _set_flags() + return _run_dbt(args, expect_pass) + + +def run_dbt_and_capture(args: Optional[List[str]] = None, expect_pass: bool = True): + _set_flags() + return _run_dbt_and_capture(args, expect_pass) + + +def _set_flags(): + # in order to call dbt's internal profile rendering, we need to set the + # flags global. This is a bit of a hack, but it's the best way to do it. + from dbt.flags import set_from_args + from argparse import Namespace + set_from_args(Namespace(), None) From 22090089c5ab4074b7f5ad98ca95ccfaaf181576 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Tue, 23 Jan 2024 18:42:26 -0500 Subject: [PATCH 013/114] fix build checks --- pyproject.toml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 68b2c526c..aa517bd5e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -124,13 +124,13 @@ check-all = [ ] check-wheel = [ "twine check dist/*", - "find ./dist/dbt_adapters-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", - "pip freeze | grep dbt-adapters", + "find ./dist/dbt_postgres-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", + "pip freeze | grep dbt-postgres", ] check-sdist = [ "check-wheel-contents dist/*.whl --ignore W007,W008", - "find ./dist/dbt_adapters-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", - "pip freeze | grep dbt-adapters", + "find ./dist/dbt_postgres-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", + "pip freeze | grep dbt-postgres", ] [tool.black] line-length = 99 From b5263933fd769d832fd4a817997d727bcf5f371f Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 11:38:40 -0500 Subject: [PATCH 014/114] add connection config to functional tests --- test.env.example | 6 ++++++ tests/functional/conftest.py | 18 +++++++++++++++--- 2 files changed, 21 insertions(+), 3 deletions(-) create mode 100644 test.env.example diff --git a/test.env.example b/test.env.example new file mode 100644 index 000000000..b4d04412d --- /dev/null +++ b/test.env.example @@ -0,0 +1,6 @@ +POSTGRES_TEST_HOST= +POSTGRES_TEST_PORT= +POSTGRES_TEST_USER= +POSTGRES_TEST_PASS= +POSTGRES_TEST_DATABASE= +POSTGRES_TEST_THREADS= diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py index 7620a954a..f0f95c9a3 100644 --- a/tests/functional/conftest.py +++ b/tests/functional/conftest.py @@ -1,11 +1,23 @@ +import os + import pytest from tests.functional.projects import dbt_integration -pytest_plugins = ["dbt.tests.fixtures.project"] - - @pytest.fixture(scope="class") def dbt_integration_project(): return dbt_integration() + + +@pytest.fixture(scope="class") +def dbt_profile_target(): + return { + "type": "postgres", + "host": os.getenv("POSTGRES_TEST_HOST", "localhost"), + "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), + "user": os.getenv("POSTGRES_TEST_USER", "root"), + "pass": os.getenv("POSTGRES_TEST_PASS", "password"), + "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), + "threads": int(os.getenv("POSTGRES_TEST_THREADS", 4)), + } From 449c88ee55b8682c5aeef2f1830aba36b361532f Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 11:39:14 -0500 Subject: [PATCH 015/114] fix relative import --- tests/unit/test_postgres_adapter.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/tests/unit/test_postgres_adapter.py b/tests/unit/test_postgres_adapter.py index 9278620f9..57e0942e4 100644 --- a/tests/unit/test_postgres_adapter.py +++ b/tests/unit/test_postgres_adapter.py @@ -17,8 +17,7 @@ from psycopg2 import DatabaseError, extensions as psycopg2_extensions from dbt.adapters.postgres import Plugin as PostgresPlugin, PostgresAdapter - -from utils import ( +from tests.unit.utils import ( TestAdapterConversions, clear_plugin, config_from_parts_or_dicts, @@ -28,9 +27,6 @@ ) -# set_from_args(Namespace(WARN_ERROR=False), None) - - class TestPostgresAdapter(TestCase): def setUp(self): project_cfg = { From c4fa2dd7f38f7977bbb83eddfba15573100c8a74 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 11:39:49 -0500 Subject: [PATCH 016/114] move globalization of flags into base conftest --- tests/conftest.py | 8 ++++ tests/unit/utils.py | 113 ++++++++++++++++++++------------------------ 2 files changed, 59 insertions(+), 62 deletions(-) create mode 100644 tests/conftest.py diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 000000000..d433a8b02 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,8 @@ +# in order to call dbt's internal profile rendering, we need to set the +# flags global. This is a bit of a hack, but it's the best way to do it. +from dbt.flags import set_from_args +from argparse import Namespace + +set_from_args(Namespace(), None) + +pytest_plugins = "dbt.tests.fixtures.project" diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 1abb56cb0..96fff0944 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -1,3 +1,4 @@ +from argparse import Namespace import os import string from unittest import TestCase, mock @@ -6,11 +7,6 @@ from dbt.config.project import PartialProject -class Obj: - which = "blah" - single_threaded = False - - def mock_connection(name, state="open"): conn = mock.MagicMock() conn.name = name @@ -18,7 +14,41 @@ def mock_connection(name, state="open"): return conn -def profile_from_dict(profile, profile_name, cli_vars="{}"): +def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars={}): + from dbt.config import Project, Profile, RuntimeConfig + from copy import deepcopy + + if isinstance(project, Project): + profile_name = project.profile_name + else: + profile_name = project.get("profile") + + if not isinstance(profile, Profile): + profile = _profile_from_dict( + deepcopy(profile), + profile_name, + cli_vars, + ) + + if not isinstance(project, Project): + project = _project_from_dict( + deepcopy(project), + profile, + packages, + selectors, + cli_vars, + ) + + args = Namespace( + which="blah", + single_threaded=False, + vars=cli_vars, + profile_dir="/dev/null", + ) + return RuntimeConfig.from_parts(project=project, profile=profile, args=args) + + +def _profile_from_dict(profile, profile_name, cli_vars="{}"): from dbt.config import Profile from dbt.config.renderer import ProfileRenderer from dbt.config.utils import parse_cli_vars @@ -28,12 +58,6 @@ def profile_from_dict(profile, profile_name, cli_vars="{}"): renderer = ProfileRenderer(cli_vars) - # in order to call dbt's internal profile rendering, we need to set the - # flags global. This is a bit of a hack, but it's the best way to do it. - from dbt.flags import set_from_args - from argparse import Namespace - - set_from_args(Namespace(), None) return Profile.from_raw_profile_info( profile, profile_name, @@ -41,81 +65,46 @@ def profile_from_dict(profile, profile_name, cli_vars="{}"): ) -def project_from_dict(project, profile, packages=None, selectors=None, cli_vars="{}"): +def _project_from_dict(project, profile, packages=None, selectors=None, cli_vars="{}"): from dbt.config.renderer import DbtProjectYamlRenderer from dbt.config.utils import parse_cli_vars - if not isinstance(cli_vars, dict): - cli_vars = parse_cli_vars(cli_vars) - - renderer = DbtProjectYamlRenderer(profile, cli_vars) - project_root = project.pop("project-root", os.getcwd()) - partial = PartialProject.from_dicts( project_root=project_root, project_dict=project, packages_dict=packages, selectors_dict=selectors, ) - return partial.render(renderer) - - -def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars={}): - from dbt.config import Project, Profile, RuntimeConfig - from copy import deepcopy - - if isinstance(project, Project): - profile_name = project.profile_name - else: - profile_name = project.get("profile") - - if not isinstance(profile, Profile): - profile = profile_from_dict( - deepcopy(profile), - profile_name, - cli_vars, - ) - - if not isinstance(project, Project): - project = project_from_dict( - deepcopy(project), - profile, - packages, - selectors, - cli_vars, - ) - - args = Obj() - args.vars = cli_vars - args.profile_dir = "/dev/null" - return RuntimeConfig.from_parts(project=project, profile=profile, args=args) + if not isinstance(cli_vars, dict): + cli_vars = parse_cli_vars(cli_vars) -def inject_plugin(plugin): - from dbt.adapters.factory import FACTORY + renderer = DbtProjectYamlRenderer(profile, cli_vars) + project = partial.render(renderer) - key = plugin.adapter.type() - FACTORY.plugins[key] = plugin + return project -def inject_adapter(value, plugin): +def inject_adapter(adapter, plugin): """Inject the given adapter into the adapter factory, so your hand-crafted artisanal adapter will be available from get_adapter() as if dbt loaded it. """ - inject_plugin(plugin) from dbt.adapters.factory import FACTORY - key = value.type() - FACTORY.adapters[key] = value + plugin_key = plugin.adapter.type() + FACTORY.plugins[plugin_key] = plugin + + adapter_key = adapter.type() + FACTORY.adapters[adapter_key] = adapter def clear_plugin(plugin): from dbt.adapters.factory import FACTORY - key = plugin.adapter.type() - FACTORY.plugins.pop(key, None) - FACTORY.adapters.pop(key, None) + adapter_key = plugin.adapter.type() + FACTORY.plugins.pop(adapter_key, None) + FACTORY.adapters.pop(adapter_key, None) class TestAdapterConversions(TestCase): From 91a522c5fff5f645012ebbcfec25999a7d34b433 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 11:51:39 -0500 Subject: [PATCH 017/114] formatting --- tests/functional/basic/test_basic.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/tests/functional/basic/test_basic.py b/tests/functional/basic/test_basic.py index 478023e61..d2c60b161 100644 --- a/tests/functional/basic/test_basic.py +++ b/tests/functional/basic/test_basic.py @@ -4,14 +4,9 @@ from tests.functional.utils import run_dbt -my_model_sql = """ - select 1 as fun -""" - - @pytest.fixture(scope="class") def models(): - return {"my_model.sql": my_model_sql} + return {"my_model.sql": "select 1 as fun"} def test_basic(project): From 4438484f7049815c907ee5a2c827a26b5fe38ab6 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 12:07:28 -0500 Subject: [PATCH 018/114] break apart unit tests into separate modules --- ...st_postgres_adapter.py => test_adapter.py} | 307 +----------------- tests/unit/test_adapter_conversions.py | 106 ++++++ tests/unit/test_connection.py | 216 ++++++++++++ tests/unit/test_filter_catalog.py | 28 ++ tests/unit/utils.py | 35 +- 5 files changed, 358 insertions(+), 334 deletions(-) rename tests/unit/{test_postgres_adapter.py => test_adapter.py} (52%) create mode 100644 tests/unit/test_adapter_conversions.py create mode 100644 tests/unit/test_connection.py create mode 100644 tests/unit/test_filter_catalog.py diff --git a/tests/unit/test_postgres_adapter.py b/tests/unit/test_adapter.py similarity index 52% rename from tests/unit/test_postgres_adapter.py rename to tests/unit/test_adapter.py index 57e0942e4..0a7ed534e 100644 --- a/tests/unit/test_postgres_adapter.py +++ b/tests/unit/test_adapter.py @@ -1,28 +1,16 @@ import dataclasses -import decimal from multiprocessing import get_context from unittest import TestCase, mock import agate -import pytest from dbt.adapters.base import BaseRelation from dbt.adapters.contracts.relation import Path -from dbt.context.manifest import generate_query_header_context -from dbt.context.providers import generate_runtime_macro_context -from dbt.contracts.files import FileHash -from dbt.contracts.graph.manifest import ManifestStateCheck -from dbt.task.debug import DebugTask -from dbt_common.clients import agate_helper -from dbt_common.exceptions import DbtConfigError, DbtValidationError -from psycopg2 import DatabaseError, extensions as psycopg2_extensions +from dbt_common.exceptions import DbtValidationError from dbt.adapters.postgres import Plugin as PostgresPlugin, PostgresAdapter from tests.unit.utils import ( - TestAdapterConversions, - clear_plugin, config_from_parts_or_dicts, inject_adapter, - load_internal_manifest_macros, mock_connection, ) @@ -361,296 +349,3 @@ def catalog_test(self, mock_get_relations, mock_execute, filtered=False): self.assertEqual(exceptions, []) - -class TestConnectingPostgresAdapter(TestCase): - def setUp(self): - self.target_dict = { - "type": "postgres", - "dbname": "postgres", - "user": "root", - "host": "thishostshouldnotexist", - "pass": "password", - "port": 5432, - "schema": "public", - } - - profile_cfg = { - "outputs": { - "test": self.target_dict, - }, - "target": "test", - } - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "quoting": { - "identifier": False, - "schema": True, - }, - "config-version": 2, - } - - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self.mp_context = get_context("spawn") - - self.handle = mock.MagicMock(spec=psycopg2_extensions.connection) - self.cursor = self.handle.cursor.return_value - self.mock_execute = self.cursor.execute - self.patcher = mock.patch("dbt.adapters.postgres.connections.psycopg2") - self.psycopg2 = self.patcher.start() - - # Create the Manifest.state_check patcher - @mock.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") - def _mock_state_check(self): - all_projects = self.all_projects - return ManifestStateCheck( - vars_hash=FileHash.from_contents("vars"), - project_hashes={name: FileHash.from_contents(name) for name in all_projects}, - profile_hash=FileHash.from_contents("profile"), - ) - - self.load_state_check = mock.patch( - "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" - ) - self.mock_state_check = self.load_state_check.start() - self.mock_state_check.side_effect = _mock_state_check - - self.psycopg2.connect.return_value = self.handle - self.adapter = PostgresAdapter(self.config, self.mp_context) - self.adapter.set_macro_resolver(load_internal_manifest_macros(self.config)) - self.adapter.set_macro_context_generator(generate_runtime_macro_context) - self.adapter.connections.set_query_header( - generate_query_header_context(self.config, self.adapter.get_macro_resolver()) - ) - self.qh_patch = mock.patch.object(self.adapter.connections.query_header, "add") - self.mock_query_header_add = self.qh_patch.start() - self.mock_query_header_add.side_effect = lambda q: "/* dbt */\n{}".format(q) - self.adapter.acquire_connection() - inject_adapter(self.adapter, PostgresPlugin) - - def tearDown(self): - # we want a unique self.handle every time. - self.adapter.cleanup_connections() - self.qh_patch.stop() - self.patcher.stop() - self.load_state_check.stop() - clear_plugin(PostgresPlugin) - - def test_quoting_on_drop_schema(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.drop_schema(relation) - - self.mock_execute.assert_has_calls( - [mock.call('/* dbt */\ndrop schema if exists "test_schema" cascade', None)] - ) - - def test_quoting_on_drop(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="test_table", - type="table", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.drop_relation(relation) - self.mock_execute.assert_has_calls( - [ - mock.call( - '/* dbt */\ndrop table if exists "postgres"."test_schema".test_table cascade', - None, - ) - ] - ) - - def test_quoting_on_truncate(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="test_table", - type="table", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.truncate_relation(relation) - self.mock_execute.assert_has_calls( - [mock.call('/* dbt */\ntruncate table "postgres"."test_schema".test_table', None)] - ) - - def test_quoting_on_rename(self): - from_relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="table_a", - type="table", - quote_policy=self.adapter.config.quoting, - ) - to_relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="table_b", - type="table", - quote_policy=self.adapter.config.quoting, - ) - - self.adapter.rename_relation(from_relation=from_relation, to_relation=to_relation) - self.mock_execute.assert_has_calls( - [ - mock.call( - '/* dbt */\nalter table "postgres"."test_schema".table_a rename to table_b', - None, - ) - ] - ) - - @pytest.mark.skip(""" - We moved from __version__ to __about__ when establishing `hatch` as our build tool. - However, `adapters.factory.register_adapter` assumes __version__ when determining - the adapter version. This test causes an import error - """) - def test_debug_connection_ok(self): - DebugTask.validate_connection(self.target_dict) - self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) - - def test_debug_connection_fail_nopass(self): - del self.target_dict["pass"] - with self.assertRaises(DbtConfigError): - DebugTask.validate_connection(self.target_dict) - - @pytest.mark.skip(""" - We moved from __version__ to __about__ when establishing `hatch` as our build tool. - However, `adapters.factory.register_adapter` assumes __version__ when determining - the adapter version. This test causes an import error - """) - def test_connection_fail_select(self): - self.mock_execute.side_effect = DatabaseError() - with self.assertRaises(DbtConfigError): - DebugTask.validate_connection(self.target_dict) - self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) - - def test_dbname_verification_is_case_insensitive(self): - # Override adapter settings from setUp() - self.target_dict["dbname"] = "Postgres" - profile_cfg = { - "outputs": { - "test": self.target_dict, - }, - "target": "test", - } - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "quoting": { - "identifier": False, - "schema": True, - }, - "config-version": 2, - } - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self.mp_context = get_context("spawn") - self.adapter.cleanup_connections() - self._adapter = PostgresAdapter(self.config, self.mp_context) - self.adapter.verify_database("postgres") - - -class TestPostgresFilterCatalog(TestCase): - def test__catalog_filter_table(self): - used_schemas = [["a", "B"], ["a", "1234"]] - column_names = ["table_name", "table_database", "table_schema", "something"] - rows = [ - ["foo", "a", "b", "1234"], # include - ["foo", "a", "1234", "1234"], # include, w/ table schema as str - ["foo", "c", "B", "1234"], # skip - ["1234", "A", "B", "1234"], # include, w/ table name as str - ] - table = agate.Table(rows, column_names, agate_helper.DEFAULT_TYPE_TESTER) - - result = PostgresAdapter._catalog_filter_table(table, used_schemas) - assert len(result) == 3 - for row in result.rows: - assert isinstance(row["table_schema"], str) - assert isinstance(row["table_database"], str) - assert isinstance(row["table_name"], str) - assert isinstance(row["something"], decimal.Decimal) - - -class TestPostgresAdapterConversions(TestAdapterConversions): - def test_convert_text_type(self): - rows = [ - ["", "a1", "stringval1"], - ["", "a2", "stringvalasdfasdfasdfa"], - ["", "a3", "stringval3"], - ] - agate_table = self._make_table_of(rows, agate.Text) - expected = ["text", "text", "text"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_text_type(agate_table, col_idx) == expect - - def test_convert_number_type(self): - rows = [ - ["", "23.98", "-1"], - ["", "12.78", "-2"], - ["", "79.41", "-3"], - ] - agate_table = self._make_table_of(rows, agate.Number) - expected = ["integer", "float8", "integer"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_number_type(agate_table, col_idx) == expect - - def test_convert_boolean_type(self): - rows = [ - ["", "false", "true"], - ["", "false", "false"], - ["", "false", "true"], - ] - agate_table = self._make_table_of(rows, agate.Boolean) - expected = ["boolean", "boolean", "boolean"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_boolean_type(agate_table, col_idx) == expect - - def test_convert_datetime_type(self): - rows = [ - ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"], - ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"], - ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"], - ] - agate_table = self._make_table_of( - rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime] - ) - expected = [ - "timestamp without time zone", - "timestamp without time zone", - "timestamp without time zone", - ] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_datetime_type(agate_table, col_idx) == expect - - def test_convert_date_type(self): - rows = [ - ["", "2019-01-01", "2019-01-04"], - ["", "2019-01-02", "2019-01-04"], - ["", "2019-01-03", "2019-01-04"], - ] - agate_table = self._make_table_of(rows, agate.Date) - expected = ["date", "date", "date"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_date_type(agate_table, col_idx) == expect - - def test_convert_time_type(self): - # dbt's default type testers actually don't have a TimeDelta at all. - agate.TimeDelta - rows = [ - ["", "120s", "10s"], - ["", "3m", "11s"], - ["", "1h", "12s"], - ] - agate_table = self._make_table_of(rows, agate.TimeDelta) - expected = ["time", "time", "time"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_time_type(agate_table, col_idx) == expect diff --git a/tests/unit/test_adapter_conversions.py b/tests/unit/test_adapter_conversions.py new file mode 100644 index 000000000..043f0166c --- /dev/null +++ b/tests/unit/test_adapter_conversions.py @@ -0,0 +1,106 @@ +import string +from unittest import TestCase + +import agate +from dbt_common.clients import agate_helper + +from dbt.adapters.postgres import PostgresAdapter + + +class AdapterConversions(TestCase): + @staticmethod + def _get_tester_for(column_type): + from dbt_common.clients import agate_helper + + if column_type is agate.TimeDelta: # dbt never makes this! + return agate.TimeDelta() + + for instance in agate_helper.DEFAULT_TYPE_TESTER._possible_types: + if isinstance(instance, column_type): # include child types + return instance + + raise ValueError(f"no tester for {column_type}") + + def _make_table_of(self, rows, column_types): + column_names = list(string.ascii_letters[: len(rows[0])]) + if isinstance(column_types, type): + column_types = [self._get_tester_for(column_types) for _ in column_names] + else: + column_types = [self._get_tester_for(typ) for typ in column_types] + table = agate.Table(rows, column_names=column_names, column_types=column_types) + return table + + +class TestPostgresAdapterConversions(AdapterConversions): + def test_convert_text_type(self): + rows = [ + ["", "a1", "stringval1"], + ["", "a2", "stringvalasdfasdfasdfa"], + ["", "a3", "stringval3"], + ] + agate_table = self._make_table_of(rows, agate.Text) + expected = ["text", "text", "text"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_text_type(agate_table, col_idx) == expect + + def test_convert_number_type(self): + rows = [ + ["", "23.98", "-1"], + ["", "12.78", "-2"], + ["", "79.41", "-3"], + ] + agate_table = self._make_table_of(rows, agate.Number) + expected = ["integer", "float8", "integer"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_number_type(agate_table, col_idx) == expect + + def test_convert_boolean_type(self): + rows = [ + ["", "false", "true"], + ["", "false", "false"], + ["", "false", "true"], + ] + agate_table = self._make_table_of(rows, agate.Boolean) + expected = ["boolean", "boolean", "boolean"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_boolean_type(agate_table, col_idx) == expect + + def test_convert_datetime_type(self): + rows = [ + ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"], + ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"], + ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"], + ] + agate_table = self._make_table_of( + rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime] + ) + expected = [ + "timestamp without time zone", + "timestamp without time zone", + "timestamp without time zone", + ] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_datetime_type(agate_table, col_idx) == expect + + def test_convert_date_type(self): + rows = [ + ["", "2019-01-01", "2019-01-04"], + ["", "2019-01-02", "2019-01-04"], + ["", "2019-01-03", "2019-01-04"], + ] + agate_table = self._make_table_of(rows, agate.Date) + expected = ["date", "date", "date"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_date_type(agate_table, col_idx) == expect + + def test_convert_time_type(self): + # dbt's default type testers don't have a TimeDelta + rows = [ + ["", "120s", "10s"], + ["", "3m", "11s"], + ["", "1h", "12s"], + ] + agate_table = self._make_table_of(rows, agate.TimeDelta) + expected = ["time", "time", "time"] + for col_idx, expect in enumerate(expected): + assert PostgresAdapter.convert_time_type(agate_table, col_idx) == expect diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py new file mode 100644 index 000000000..e87acd16a --- /dev/null +++ b/tests/unit/test_connection.py @@ -0,0 +1,216 @@ +from multiprocessing import get_context +from unittest import TestCase, mock + +import pytest +from dbt.context.manifest import generate_query_header_context +from dbt.context.providers import generate_runtime_macro_context +from dbt.contracts.files import FileHash +from dbt.contracts.graph.manifest import ManifestStateCheck +from dbt.task.debug import DebugTask +from dbt_common.exceptions import DbtConfigError +from psycopg2 import DatabaseError, extensions as psycopg2_extensions + +from dbt.adapters.postgres import Plugin as PostgresPlugin, PostgresAdapter +from tests.unit.utils import ( + clear_plugin, + config_from_parts_or_dicts, + inject_adapter, + load_internal_manifest_macros, +) + + +class TestPostgresConnection(TestCase): + def setUp(self): + self.target_dict = { + "type": "postgres", + "dbname": "postgres", + "user": "root", + "host": "thishostshouldnotexist", + "pass": "password", + "port": 5432, + "schema": "public", + } + + profile_cfg = { + "outputs": { + "test": self.target_dict, + }, + "target": "test", + } + project_cfg = { + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "quoting": { + "identifier": False, + "schema": True, + }, + "config-version": 2, + } + + self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) + self.mp_context = get_context("spawn") + + self.handle = mock.MagicMock(spec=psycopg2_extensions.connection) + self.cursor = self.handle.cursor.return_value + self.mock_execute = self.cursor.execute + self.patcher = mock.patch("dbt.adapters.postgres.connections.psycopg2") + self.psycopg2 = self.patcher.start() + + # Create the Manifest.state_check patcher + @mock.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") + def _mock_state_check(self): + all_projects = self.all_projects + return ManifestStateCheck( + vars_hash=FileHash.from_contents("vars"), + project_hashes={name: FileHash.from_contents(name) for name in all_projects}, + profile_hash=FileHash.from_contents("profile"), + ) + + self.load_state_check = mock.patch( + "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" + ) + self.mock_state_check = self.load_state_check.start() + self.mock_state_check.side_effect = _mock_state_check + + self.psycopg2.connect.return_value = self.handle + self.adapter = PostgresAdapter(self.config, self.mp_context) + self.adapter.set_macro_resolver(load_internal_manifest_macros(self.config)) + self.adapter.set_macro_context_generator(generate_runtime_macro_context) + self.adapter.connections.set_query_header( + generate_query_header_context(self.config, self.adapter.get_macro_resolver()) + ) + self.qh_patch = mock.patch.object(self.adapter.connections.query_header, "add") + self.mock_query_header_add = self.qh_patch.start() + self.mock_query_header_add.side_effect = lambda q: "/* dbt */\n{}".format(q) + self.adapter.acquire_connection() + inject_adapter(self.adapter, PostgresPlugin) + + def tearDown(self): + # we want a unique self.handle every time. + self.adapter.cleanup_connections() + self.qh_patch.stop() + self.patcher.stop() + self.load_state_check.stop() + clear_plugin(PostgresPlugin) + + def test_quoting_on_drop_schema(self): + relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + quote_policy=self.adapter.config.quoting, + ) + self.adapter.drop_schema(relation) + + self.mock_execute.assert_has_calls( + [mock.call('/* dbt */\ndrop schema if exists "test_schema" cascade', None)] + ) + + def test_quoting_on_drop(self): + relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="test_table", + type="table", + quote_policy=self.adapter.config.quoting, + ) + self.adapter.drop_relation(relation) + self.mock_execute.assert_has_calls( + [ + mock.call( + '/* dbt */\ndrop table if exists "postgres"."test_schema".test_table cascade', + None, + ) + ] + ) + + def test_quoting_on_truncate(self): + relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="test_table", + type="table", + quote_policy=self.adapter.config.quoting, + ) + self.adapter.truncate_relation(relation) + self.mock_execute.assert_has_calls( + [mock.call('/* dbt */\ntruncate table "postgres"."test_schema".test_table', None)] + ) + + def test_quoting_on_rename(self): + from_relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="table_a", + type="table", + quote_policy=self.adapter.config.quoting, + ) + to_relation = self.adapter.Relation.create( + database="postgres", + schema="test_schema", + identifier="table_b", + type="table", + quote_policy=self.adapter.config.quoting, + ) + + self.adapter.rename_relation(from_relation=from_relation, to_relation=to_relation) + self.mock_execute.assert_has_calls( + [ + mock.call( + '/* dbt */\nalter table "postgres"."test_schema".table_a rename to table_b', + None, + ) + ] + ) + + @pytest.mark.skip(""" + We moved from __version__ to __about__ when establishing `hatch` as our build tool. + However, `adapters.factory.register_adapter` assumes __version__ when determining + the adapter version. This test causes an import error + """) + def test_debug_connection_ok(self): + DebugTask.validate_connection(self.target_dict) + self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) + + def test_debug_connection_fail_nopass(self): + del self.target_dict["pass"] + with self.assertRaises(DbtConfigError): + DebugTask.validate_connection(self.target_dict) + + @pytest.mark.skip(""" + We moved from __version__ to __about__ when establishing `hatch` as our build tool. + However, `adapters.factory.register_adapter` assumes __version__ when determining + the adapter version. This test causes an import error + """) + def test_connection_fail_select(self): + self.mock_execute.side_effect = DatabaseError() + with self.assertRaises(DbtConfigError): + DebugTask.validate_connection(self.target_dict) + self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) + + def test_dbname_verification_is_case_insensitive(self): + # Override adapter settings from setUp() + self.target_dict["dbname"] = "Postgres" + profile_cfg = { + "outputs": { + "test": self.target_dict, + }, + "target": "test", + } + project_cfg = { + "name": "X", + "version": "0.1", + "profile": "test", + "project-root": "/tmp/dbt/does-not-exist", + "quoting": { + "identifier": False, + "schema": True, + }, + "config-version": 2, + } + self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) + self.mp_context = get_context("spawn") + self.adapter.cleanup_connections() + self._adapter = PostgresAdapter(self.config, self.mp_context) + self.adapter.verify_database("postgres") diff --git a/tests/unit/test_filter_catalog.py b/tests/unit/test_filter_catalog.py new file mode 100644 index 000000000..1d5d7d2c0 --- /dev/null +++ b/tests/unit/test_filter_catalog.py @@ -0,0 +1,28 @@ +import decimal +from unittest import TestCase + +import agate +from dbt_common.clients import agate_helper + +from dbt.adapters.postgres import PostgresAdapter + + +class TestPostgresFilterCatalog(TestCase): + def test__catalog_filter_table(self): + used_schemas = [["a", "B"], ["a", "1234"]] + column_names = ["table_name", "table_database", "table_schema", "something"] + rows = [ + ["foo", "a", "b", "1234"], # include + ["foo", "a", "1234", "1234"], # include, w/ table schema as str + ["foo", "c", "B", "1234"], # skip + ["1234", "A", "B", "1234"], # include, w/ table name as str + ] + table = agate.Table(rows, column_names, agate_helper.DEFAULT_TYPE_TESTER) + + result = PostgresAdapter._catalog_filter_table(table, used_schemas) + assert len(result) == 3 + for row in result.rows: + assert isinstance(row["table_schema"], str) + assert isinstance(row["table_database"], str) + assert isinstance(row["table_name"], str) + assert isinstance(row["something"], decimal.Decimal) diff --git a/tests/unit/utils.py b/tests/unit/utils.py index 96fff0944..2e05e08a4 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils.py @@ -1,9 +1,7 @@ from argparse import Namespace import os -import string -from unittest import TestCase, mock +from unittest import mock -import agate from dbt.config.project import PartialProject @@ -87,8 +85,9 @@ def _project_from_dict(project, profile, packages=None, selectors=None, cli_vars def inject_adapter(adapter, plugin): - """Inject the given adapter into the adapter factory, so your hand-crafted - artisanal adapter will be available from get_adapter() as if dbt loaded it. + """ + Inject the given adapter into the factory + so that it will be available from get_adapter() as if dbt loaded it. """ from dbt.adapters.factory import FACTORY @@ -100,6 +99,9 @@ def inject_adapter(adapter, plugin): def clear_plugin(plugin): + """ + Remove the adapter on the given plugin from the factory. + """ from dbt.adapters.factory import FACTORY adapter_key = plugin.adapter.type() @@ -107,29 +109,6 @@ def clear_plugin(plugin): FACTORY.adapters.pop(adapter_key, None) -class TestAdapterConversions(TestCase): - def _get_tester_for(self, column_type): - from dbt_common.clients import agate_helper - - if column_type is agate.TimeDelta: # dbt never makes this! - return agate.TimeDelta() - - for instance in agate_helper.DEFAULT_TYPE_TESTER._possible_types: - if isinstance(instance, column_type): # include child types - return instance - - raise ValueError(f"no tester for {column_type}") - - def _make_table_of(self, rows, column_types): - column_names = list(string.ascii_letters[: len(rows[0])]) - if isinstance(column_types, type): - column_types = [self._get_tester_for(column_types) for _ in column_names] - else: - column_types = [self._get_tester_for(typ) for typ in column_types] - table = agate.Table(rows, column_names=column_names, column_types=column_types) - return table - - def load_internal_manifest_macros(config, macro_hook=lambda m: None): from dbt.parser.manifest import ManifestLoader From 8202da1a7646e2d6232e1e63a43a831ce3999d3c Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 21:51:05 -0500 Subject: [PATCH 019/114] linting --- dbt/adapters/postgres/__init__.py | 2 +- dbt/adapters/postgres/connections.py | 4 ++-- dbt/adapters/postgres/impl.py | 6 +++--- dbt/adapters/postgres/relation.py | 3 ++- .../postgres/relation_configs/index.py | 2 +- .../relation_configs/materialized_view.py | 6 +++--- .../functional/artifacts/expected_manifest.py | 1 - tests/functional/basic/test_mixed_case_db.py | 2 -- tests/functional/compile/test_compile.py | 4 +++- .../configs/test_configs_in_schema_files.py | 1 - .../configs/test_contract_configs.py | 1 - .../test_custom_node_colors_configs.py | 4 ---- .../context_methods/test_cli_var_override.py | 1 - .../test_intersection_syntax.py | 20 ------------------- .../test_incremental_schema.py | 1 - .../metrics/test_metric_helper_functions.py | 1 - .../partial_parsing/test_file_diff.py | 1 - .../partial_parsing/test_partial_parsing.py | 2 -- .../partial_parsing/test_pp_groups.py | 1 - .../partial_parsing/test_pp_vars.py | 3 --- .../test_check_cols_snapshot.py | 1 - tests/functional/test_events.py | 1 - tests/functional/test_thread_count.py | 5 +---- tests/functional/utils.py | 1 + tests/unit/test_adapter.py | 1 - tests/unit/test_connection.py | 12 +++++++---- 26 files changed, 25 insertions(+), 62 deletions(-) diff --git a/dbt/adapters/postgres/__init__.py b/dbt/adapters/postgres/__init__.py index 1fa95c820..0352a81d1 100644 --- a/dbt/adapters/postgres/__init__.py +++ b/dbt/adapters/postgres/__init__.py @@ -8,7 +8,7 @@ Plugin = AdapterPlugin( - adapter=PostgresAdapter, + adapter=PostgresAdapter, # type: ignore credentials=PostgresCredentials, include_path=postgres.PACKAGE_PATH, ) diff --git a/dbt/adapters/postgres/connections.py b/dbt/adapters/postgres/connections.py index 244b50c3d..741f57019 100644 --- a/dbt/adapters/postgres/connections.py +++ b/dbt/adapters/postgres/connections.py @@ -1,6 +1,6 @@ from contextlib import contextmanager from dataclasses import dataclass -from typing import Optional +from typing import Optional, Union from dbt.adapters.contracts.connection import AdapterResponse, Credentials from dbt.adapters.events.logging import AdapterLogger @@ -199,7 +199,7 @@ def get_response(cls, cursor) -> AdapterResponse: return AdapterResponse(_message=message, code=code, rows_affected=rows) @classmethod - def data_type_code_to_name(cls, type_code: int) -> str: + def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: if type_code in psycopg2.extensions.string_types: return psycopg2.extensions.string_types[type_code].name else: diff --git a/dbt/adapters/postgres/impl.py b/dbt/adapters/postgres/impl.py index f5ace09a0..d49d334b2 100644 --- a/dbt/adapters/postgres/impl.py +++ b/dbt/adapters/postgres/impl.py @@ -110,7 +110,7 @@ def _link_cached_database_relations(self, schemas: Set[str]): database = self.config.credentials.database table = self.execute_macro(GET_RELATIONS_MACRO_NAME) - for (dep_schema, dep_name, refed_schema, refed_name) in table: + for dep_schema, dep_name, refed_schema, refed_name in table: dependent = self.Relation.create( database=database, schema=dep_schema, identifier=dep_name ) @@ -131,12 +131,12 @@ def _get_catalog_schemas(self, manifest): except DbtRuntimeError as exc: raise CrossDbReferenceProhibitedError(self.type(), exc.msg) - def _link_cached_relations(self, manifest): + def _link_cached_relations(self, manifest) -> None: schemas: Set[str] = set() relations_schemas = self._get_cache_schemas(manifest) for relation in relations_schemas: self.verify_database(relation.database) - schemas.add(relation.schema.lower()) + schemas.add(relation.schema.lower()) # type: ignore self._link_cached_database_relations(schemas) diff --git a/dbt/adapters/postgres/relation.py b/dbt/adapters/postgres/relation.py index 7cb31827e..3f659fb4c 100644 --- a/dbt/adapters/postgres/relation.py +++ b/dbt/adapters/postgres/relation.py @@ -70,6 +70,7 @@ def get_materialized_view_config_change_collection( # {% if configuration_changes is none %} if config_change_collection.has_changes: return config_change_collection + return None def _get_index_config_changes( self, @@ -100,4 +101,4 @@ def _get_index_config_changes( ) for index in new_indexes.difference(existing_indexes) ) - return set().union(drop_changes, create_changes) + return set().union(drop_changes, create_changes) # type: ignore diff --git a/dbt/adapters/postgres/relation_configs/index.py b/dbt/adapters/postgres/relation_configs/index.py index ba0a9ce12..c4863073d 100644 --- a/dbt/adapters/postgres/relation_configs/index.py +++ b/dbt/adapters/postgres/relation_configs/index.py @@ -43,7 +43,7 @@ class PostgresIndexConfig(RelationConfigBase, RelationConfigValidationMixin): - nulls_distinct: `True` """ - name: str = field(default=None, hash=False, compare=False) + name: str = field(default="", hash=False, compare=False) column_names: FrozenSet[str] = field(default_factory=frozenset, hash=True) unique: bool = field(default=False, hash=True) method: PostgresIndexMethod = field(default=PostgresIndexMethod.default(), hash=True) diff --git a/dbt/adapters/postgres/relation_configs/materialized_view.py b/dbt/adapters/postgres/relation_configs/materialized_view.py index af670c598..3563833e2 100644 --- a/dbt/adapters/postgres/relation_configs/materialized_view.py +++ b/dbt/adapters/postgres/relation_configs/materialized_view.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import Set, FrozenSet, List, Dict +from typing import Any, Set, FrozenSet, List, Dict from typing_extensions import Self import agate @@ -74,10 +74,10 @@ def from_config(cls, relation_config: RelationConfig) -> Self: @classmethod def parse_config(cls, relation_config: RelationConfig) -> Dict: - indexes: List[dict] = relation_config.config.extra.get("indexes", []) + indexes: List[Dict[Any, Any]] = relation_config.config.get("indexes", []) # type: ignore config_dict = { "table_name": relation_config.identifier, - "query": relation_config.compiled_code, + "query": getattr(relation_config, "compiled_code", None), "indexes": [PostgresIndexConfig.parse_model_node(index) for index in indexes], } return config_dict diff --git a/tests/functional/artifacts/expected_manifest.py b/tests/functional/artifacts/expected_manifest.py index 7c352bfb6..90f480c8a 100644 --- a/tests/functional/artifacts/expected_manifest.py +++ b/tests/functional/artifacts/expected_manifest.py @@ -204,7 +204,6 @@ def __str__(self): def expected_seeded_manifest(project, model_database=None, quote_model=False): - model_sql_path = os.path.join("models", "model.sql") second_model_sql_path = os.path.join("models", "second_model.sql") model_schema_yml_path = os.path.join("models", "schema.yml") diff --git a/tests/functional/basic/test_mixed_case_db.py b/tests/functional/basic/test_mixed_case_db.py index c6f318ce7..bf82571d3 100644 --- a/tests/functional/basic/test_mixed_case_db.py +++ b/tests/functional/basic/test_mixed_case_db.py @@ -16,7 +16,6 @@ def models(): @pytest.fixture(scope="class") def dbt_profile_data(unique_schema): - return { "test": { "outputs": { @@ -37,7 +36,6 @@ def dbt_profile_data(unique_schema): def test_basic(project_root, project): - assert project.database == "dbtMixedCase" # Tests that a project with a single model works diff --git a/tests/functional/compile/test_compile.py b/tests/functional/compile/test_compile.py index 875193a3d..cce4bb9ec 100644 --- a/tests/functional/compile/test_compile.py +++ b/tests/functional/compile/test_compile.py @@ -161,7 +161,9 @@ def test_inline_fail_database_error(self, project): run_dbt(["show", "--inline", "slect asdlkjfsld;j"]) def test_multiline_jinja(self, project): - (results, log_output) = run_dbt_and_capture(["compile", "--inline", fixtures.model_multiline_jinja]) + (results, log_output) = run_dbt_and_capture( + ["compile", "--inline", fixtures.model_multiline_jinja] + ) assert len(results) == 1 assert "Compiled inline node is:" in log_output diff --git a/tests/functional/configs/test_configs_in_schema_files.py b/tests/functional/configs/test_configs_in_schema_files.py index aab0a964c..c1f370491 100644 --- a/tests/functional/configs/test_configs_in_schema_files.py +++ b/tests/functional/configs/test_configs_in_schema_files.py @@ -167,7 +167,6 @@ def test_config_layering( self, project, ): - # run seed assert len(run_dbt(["seed"])) == 1 diff --git a/tests/functional/configs/test_contract_configs.py b/tests/functional/configs/test_contract_configs.py index fae946160..4be935e95 100644 --- a/tests/functional/configs/test_contract_configs.py +++ b/tests/functional/configs/test_contract_configs.py @@ -405,7 +405,6 @@ def models(self): } def test__model_contract_false(self, project): - run_dbt(["parse"]) manifest = get_manifest(project.project_root) model_id = "model.test.my_model" diff --git a/tests/functional/configs/test_custom_node_colors_configs.py b/tests/functional/configs/test_custom_node_colors_configs.py index 4993a9ae1..8bd55bc5b 100644 --- a/tests/functional/configs/test_custom_node_colors_configs.py +++ b/tests/functional/configs/test_custom_node_colors_configs.py @@ -123,7 +123,6 @@ def models(self): return {"custom_color_model.sql": models__custom_node_color__model_sql} def test__model_override_project(self, project): - run_dbt(["compile"]) manifest = get_manifest(project.project_root) model_id = "model.test.custom_color_model" @@ -152,7 +151,6 @@ def models(self): } def test__model_override_schema(self, project): - run_dbt(["compile"]) manifest = get_manifest(project.project_root) model_id = "model.test.custom_color_model" @@ -214,7 +212,6 @@ def test__schema_override_project( self, project, ): - run_dbt(["compile"]) manifest = get_manifest(project.project_root) @@ -245,7 +242,6 @@ def test__model_show_overrides_dbt_project( self, project, ): - run_dbt(["compile"]) manifest = get_manifest(project.project_root) diff --git a/tests/functional/context_methods/test_cli_var_override.py b/tests/functional/context_methods/test_cli_var_override.py index d47c33f6e..757ab521a 100644 --- a/tests/functional/context_methods/test_cli_var_override.py +++ b/tests/functional/context_methods/test_cli_var_override.py @@ -61,7 +61,6 @@ def project_config_update(self): } def test__override_vars_project_level(self, project): - # This should be "override" run_dbt(["run", "--vars", "{required: override}"]) run_dbt(["test"]) diff --git a/tests/functional/graph_selection/test_intersection_syntax.py b/tests/functional/graph_selection/test_intersection_syntax.py index 87a0d3762..de15820ff 100644 --- a/tests/functional/graph_selection/test_intersection_syntax.py +++ b/tests/functional/graph_selection/test_intersection_syntax.py @@ -79,47 +79,38 @@ def test_same_model_intersection(self, project): check_result_nodes_by_name(results, ["users"]) def test_same_model_intersection_selectors(self, project): - results = run_dbt(["run", "--selector", "same_intersection"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_tags_intersection(self, project): - results = run_dbt(["run", "--models", "tag:bi,tag:users"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_tags_intersection_selectors(self, project): - results = run_dbt(["run", "--selector", "tags_intersection"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_intersection_triple_descending(self, project): - results = run_dbt(["run", "--models", "*,tag:bi,tag:users"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_intersection_triple_descending_schema(self, project): - results = run_dbt(["run", "--models", "*,tag:bi,tag:users"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_intersection_triple_descending_schema_selectors(self, project): - results = run_dbt(["run", "--selector", "triple_descending"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_intersection_triple_ascending(self, project): - results = run_dbt(["run", "--models", "tag:users,tag:bi,*"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_intersection_triple_ascending_schema_selectors(self, project): - results = run_dbt(["run", "--selector", "triple_ascending"], expect_pass=False) check_result_nodes_by_name(results, ["users"]) def test_intersection_with_exclusion(self, project): - results = run_dbt( [ "run", @@ -133,12 +124,10 @@ def test_intersection_with_exclusion(self, project): check_result_nodes_by_name(results, ["users", "users_rollup"]) def test_intersection_with_exclusion_selectors(self, project): - results = run_dbt(["run", "--selector", "intersection_with_exclusion"], expect_pass=False) check_result_nodes_by_name(results, ["users", "users_rollup"]) def test_intersection_exclude_intersection(self, project): - results = run_dbt( ["run", "--models", "tag:bi,@users", "--exclude", "tag:bi,users_rollup+"], expect_pass=False, @@ -146,7 +135,6 @@ def test_intersection_exclude_intersection(self, project): check_result_nodes_by_name(results, ["users"]) def test_intersection_exclude_intersection_selectors(self, project): - results = run_dbt( ["run", "--selector", "intersection_exclude_intersection"], expect_pass=False, @@ -154,7 +142,6 @@ def test_intersection_exclude_intersection_selectors(self, project): check_result_nodes_by_name(results, ["users"]) def test_intersection_exclude_intersection_lack(self, project): - results = run_dbt( ["run", "--models", "tag:bi,@users", "--exclude", "@emails,@emails_alt"], expect_pass=False, @@ -162,7 +149,6 @@ def test_intersection_exclude_intersection_lack(self, project): check_result_nodes_by_name(results, ["users", "users_rollup"]) def test_intersection_exclude_intersection_lack_selector(self, project): - results = run_dbt( ["run", "--selector", "intersection_exclude_intersection_lack"], expect_pass=False, @@ -170,7 +156,6 @@ def test_intersection_exclude_intersection_lack_selector(self, project): check_result_nodes_by_name(results, ["users", "users_rollup"]) def test_intersection_exclude_triple_intersection(self, project): - results = run_dbt( ["run", "--models", "tag:bi,@users", "--exclude", "*,tag:bi,users_rollup"], expect_pass=False, @@ -178,12 +163,10 @@ def test_intersection_exclude_triple_intersection(self, project): check_result_nodes_by_name(results, ["users"]) def test_intersection_concat(self, project): - results = run_dbt(["run", "--models", "tag:bi,@users", "emails_alt"], expect_pass=False) check_result_nodes_by_name(results, ["users", "users_rollup", "emails_alt"]) def test_intersection_concat_intersection(self, project): - results = run_dbt( ["run", "--models", "tag:bi,@users", "@emails_alt,emails_alt"], expect_pass=False, @@ -191,7 +174,6 @@ def test_intersection_concat_intersection(self, project): check_result_nodes_by_name(results, ["users", "users_rollup", "emails_alt"]) def test_intersection_concat_exclude(self, project): - results = run_dbt( [ "run", @@ -206,7 +188,6 @@ def test_intersection_concat_exclude(self, project): check_result_nodes_by_name(results, ["users", "emails_alt"]) def test_intersection_concat_exclude_concat(self, project): - results = run_dbt( [ "run", @@ -222,7 +203,6 @@ def test_intersection_concat_exclude_concat(self, project): check_result_nodes_by_name(results, ["users", "emails_alt"]) def test_intersection_concat_exclude_intersection_concat(self, project): - results = run_dbt( [ "run", diff --git a/tests/functional/incremental_schema_tests/test_incremental_schema.py b/tests/functional/incremental_schema_tests/test_incremental_schema.py index d4cf9d0fd..890e398a8 100644 --- a/tests/functional/incremental_schema_tests/test_incremental_schema.py +++ b/tests/functional/incremental_schema_tests/test_incremental_schema.py @@ -62,7 +62,6 @@ def tests(self): } def run_twice_and_assert(self, include, compare_source, compare_target, project): - # dbt run (twice) run_args = ["run"] if include: diff --git a/tests/functional/metrics/test_metric_helper_functions.py b/tests/functional/metrics/test_metric_helper_functions.py index 87a8dad97..7f12232ae 100644 --- a/tests/functional/metrics/test_metric_helper_functions.py +++ b/tests/functional/metrics/test_metric_helper_functions.py @@ -25,7 +25,6 @@ def test_derived_metric( self, project, ): - # initial parse manifest = run_dbt(["parse"]) assert isinstance(manifest, Manifest) diff --git a/tests/functional/partial_parsing/test_file_diff.py b/tests/functional/partial_parsing/test_file_diff.py index 3680a94e0..c7e34780f 100644 --- a/tests/functional/partial_parsing/test_file_diff.py +++ b/tests/functional/partial_parsing/test_file_diff.py @@ -22,7 +22,6 @@ class TestFileDiffPaths: def test_file_diffs(self, project): - os.environ["DBT_PP_FILE_DIFF_TEST"] = "true" run_dbt(["deps"]) diff --git a/tests/functional/partial_parsing/test_partial_parsing.py b/tests/functional/partial_parsing/test_partial_parsing.py index ee7851042..eb09dd32b 100644 --- a/tests/functional/partial_parsing/test_partial_parsing.py +++ b/tests/functional/partial_parsing/test_partial_parsing.py @@ -572,7 +572,6 @@ def models(self): } def test_pp_snapshots(self, project): - # initial run results = run_dbt() assert len(results) == 1 @@ -616,7 +615,6 @@ def tests(self): return {"generic": {"readme.md": ""}} def test_pp_generic_tests(self, project): - # initial run results = run_dbt() assert len(results) == 1 diff --git a/tests/functional/partial_parsing/test_pp_groups.py b/tests/functional/partial_parsing/test_pp_groups.py index 48fcaba6e..f75776832 100644 --- a/tests/functional/partial_parsing/test_pp_groups.py +++ b/tests/functional/partial_parsing/test_pp_groups.py @@ -24,7 +24,6 @@ def models(self): } def test_pp_groups(self, project): - # initial run results = run_dbt() assert len(results) == 2 diff --git a/tests/functional/partial_parsing/test_pp_vars.py b/tests/functional/partial_parsing/test_pp_vars.py index 2976c1990..c903cdeab 100644 --- a/tests/functional/partial_parsing/test_pp_vars.py +++ b/tests/functional/partial_parsing/test_pp_vars.py @@ -40,7 +40,6 @@ def models(self): } def test_env_vars_models(self, project): - # initial run results = run_dbt(["run"]) assert len(results) == 1 @@ -329,7 +328,6 @@ def dbt_profile_target(self): } def test_profile_env_vars(self, project, logs_dir): - # Initial run os.environ["ENV_VAR_USER"] = "root" os.environ["ENV_VAR_PASS"] = "password" @@ -381,7 +379,6 @@ def dbt_profile_target(self): } def test_profile_secret_env_vars(self, project): - # Initial run os.environ[SECRET_ENV_PREFIX + "USER"] = "root" os.environ["ENV_VAR_PASS"] = "password" diff --git a/tests/functional/simple_snapshot/test_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_snapshot.py index d11442e82..2b2673dff 100644 --- a/tests/functional/simple_snapshot/test_check_cols_snapshot.py +++ b/tests/functional/simple_snapshot/test_check_cols_snapshot.py @@ -101,7 +101,6 @@ def tests(): def test_simple_snapshot(project): - results = run_dbt(["snapshot", "--vars", "version: 1"]) assert len(results) == 1 diff --git a/tests/functional/test_events.py b/tests/functional/test_events.py index e43743ed1..c8f153068 100644 --- a/tests/functional/test_events.py +++ b/tests/functional/test_events.py @@ -5,7 +5,6 @@ def test_performance_report(project): - resource_report_level = None def check_for_report(e): diff --git a/tests/functional/test_thread_count.py b/tests/functional/test_thread_count.py index 7dd5ab498..b0061b7ae 100644 --- a/tests/functional/test_thread_count.py +++ b/tests/functional/test_thread_count.py @@ -6,10 +6,7 @@ class TestThreadCount: @pytest.fixture(scope="class") def models(self): sql = "with x as (select pg_sleep(1)) select 1" - independent_models = { - f"do_nothing_{num}.sql": sql - for num in range(1, 21) - } + independent_models = {f"do_nothing_{num}.sql": sql for num in range(1, 21)} return independent_models @pytest.fixture(scope="class") diff --git a/tests/functional/utils.py b/tests/functional/utils.py index af6427a3c..1fbd5957a 100644 --- a/tests/functional/utils.py +++ b/tests/functional/utils.py @@ -48,4 +48,5 @@ def _set_flags(): # flags global. This is a bit of a hack, but it's the best way to do it. from dbt.flags import set_from_args from argparse import Namespace + set_from_args(Namespace(), None) diff --git a/tests/unit/test_adapter.py b/tests/unit/test_adapter.py index 0a7ed534e..be2eef140 100644 --- a/tests/unit/test_adapter.py +++ b/tests/unit/test_adapter.py @@ -348,4 +348,3 @@ def catalog_test(self, mock_get_relations, mock_execute, filtered=False): self.assertEqual(tupled_catalog, {rows[0], rows[1], rows[3]}) self.assertEqual(exceptions, []) - diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py index e87acd16a..0f46d2858 100644 --- a/tests/unit/test_connection.py +++ b/tests/unit/test_connection.py @@ -164,11 +164,13 @@ def test_quoting_on_rename(self): ] ) - @pytest.mark.skip(""" + @pytest.mark.skip( + """ We moved from __version__ to __about__ when establishing `hatch` as our build tool. However, `adapters.factory.register_adapter` assumes __version__ when determining the adapter version. This test causes an import error - """) + """ + ) def test_debug_connection_ok(self): DebugTask.validate_connection(self.target_dict) self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) @@ -178,11 +180,13 @@ def test_debug_connection_fail_nopass(self): with self.assertRaises(DbtConfigError): DebugTask.validate_connection(self.target_dict) - @pytest.mark.skip(""" + @pytest.mark.skip( + """ We moved from __version__ to __about__ when establishing `hatch` as our build tool. However, `adapters.factory.register_adapter` assumes __version__ when determining the adapter version. This test causes an import error - """) + """ + ) def test_connection_fail_select(self): self.mock_execute.side_effect = DatabaseError() with self.assertRaises(DbtConfigError): From 61f62b2f64134a7a175d9cb63bd015a74b89358d Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Wed, 24 Jan 2024 21:51:44 -0500 Subject: [PATCH 020/114] fix dev and test dependencies to pull in dev versions from github --- pyproject.toml | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index aa517bd5e..633658c94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,9 @@ dependencies = [ "agate<2.0", ] [project.optional-dependencies] +dev = [ + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@fix-dependencies", +] lint = [ "black", "flake8", @@ -41,14 +44,15 @@ typecheck = [ "types-pytz", ] test = [ - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#egg=dbt-core&subdirectory=core", + # TODO: remove `dbt-core` dependencies from unit tests + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", "freezegun", "pytest", "pytest-dotenv", "pytest-xdist", ] integration = [ - "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#egg=dbt-tests-adapter&subdirectory=dbt-tests-adapter", + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@fix-dependencies#subdirectory=dbt-tests-adapter", ] build = [ "wheel", @@ -105,12 +109,14 @@ features = ["typecheck"] all = "python -m mypy ." [tool.hatch.envs.unit-tests] -features = ["test"] +# TODO: confirm this works for production testing or add appropriate hatch envs +features = ["dev", "test"] [tool.hatch.envs.unit-tests.scripts] all = "python -m pytest {args:tests/unit}" [tool.hatch.envs.integration-tests] -features = ["test", "integration"] +# TODO: confirm this works for production testing or add appropriate hatch envs +features = ["dev", "test", "integration"] [tool.hatch.envs.integration-tests.scripts] all = "python -m pytest {args:tests/functional}" @@ -152,7 +158,10 @@ files = [ "dbt/adapters/postgres", "tests/unit", ] -exclude = ["venv"] +exclude = [ + "tests/functional", + "venv" +] [tool.pytest] env_files = ["test.env"] From 9fb9c72d4f1df1c0be3578405f58caffa32cc113 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Thu, 25 Jan 2024 15:19:30 -0500 Subject: [PATCH 021/114] fix integration tests --- tests/functional/artifacts/test_artifacts.py | 81 +---- .../artifacts/test_previous_version_state.py | 4 +- .../context_methods/test_builtin_functions.py | 21 -- .../graph_selection/test_group_selection.py | 4 +- tests/functional/list/test_list.py | 13 +- .../projects/dbt_integration/__init__.py | 6 +- .../projects/graph_selection/__init__.py | 6 +- .../projects/jaffle_shop/__init__.py | 10 +- tests/functional/projects/utils.py | 3 +- .../sources/test_source_fresher_state.py | 2 - .../sources/test_source_freshness.py | 2 - tests/functional/test_config.py | 2 + tests/functional/test_connection_manager.py | 2 + tests/functional/test_init.py | 302 ------------------ 14 files changed, 30 insertions(+), 428 deletions(-) diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py index 3c182a716..756ee73f1 100644 --- a/tests/functional/artifacts/test_artifacts.py +++ b/tests/functional/artifacts/test_artifacts.py @@ -3,8 +3,8 @@ import os import dbt -from dbt.artifacts.results import RunStatus -from dbt.artifacts.run import RunResultsArtifact +from dbt.artifacts.schemas.results import RunStatus +from dbt.artifacts.schemas.run import RunResultsArtifact from dbt.contracts.graph.manifest import WritableManifest from dbt.tests.util import ( check_datetime_between, @@ -605,83 +605,6 @@ def validate(artifact_schema, artifact_dict): assert error is None -class TestVerifyArtifacts(BaseVerifyProject): - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "second_model.sql": models__second_model_sql, - "readme.md": models__readme_md, - "model.sql": models__model_sql, - } - - # Test generic "docs generate" command - def test_run_and_generate(self, project, manifest_schema_path, run_results_schema_path): - start_time = datetime.utcnow() - results = run_dbt(["compile"]) - assert len(results) == 7 - verify_manifest( - project, - expected_seeded_manifest(project, quote_model=False), - start_time, - manifest_schema_path, - ) - verify_run_results(project, expected_run_results(), start_time, run_results_schema_path) - - -class TestVerifyArtifactsReferences(BaseVerifyProject): - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": ref_models__schema_yml, - "view_summary.sql": ref_models__view_summary_sql, - "ephemeral_summary.sql": ref_models__ephemeral_summary_sql, - "ephemeral_copy.sql": ref_models__ephemeral_copy_sql, - "docs.md": ref_models__docs_md, - } - - def test_references(self, project, manifest_schema_path, run_results_schema_path): - start_time = datetime.utcnow() - results = run_dbt(["compile"]) - assert len(results) == 4 - verify_manifest( - project, expected_references_manifest(project), start_time, manifest_schema_path - ) - verify_run_results( - project, expected_references_run_results(), start_time, run_results_schema_path - ) - - -class TestVerifyArtifactsVersions(BaseVerifyProject): - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": versioned_models__schema_yml, - "versioned_model_v2.sql": versioned_models__v2_sql, - "arbitrary_file_name.sql": versioned_models__v1_sql, - "ref_versioned_model.sql": versioned_models___ref_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {} - - @pytest.fixture(scope="class") - def snapshots(self): - return {} - - def test_versions(self, project, manifest_schema_path, run_results_schema_path): - start_time = datetime.utcnow() - results = run_dbt(["compile"]) - assert len(results) == 6 - verify_manifest( - project, expected_versions_manifest(project), start_time, manifest_schema_path - ) - verify_run_results( - project, expected_versions_run_results(), start_time, run_results_schema_path - ) - - class TestVerifyRunOperation(BaseVerifyProject): @pytest.fixture(scope="class") def macros(self): diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py index abb041032..a19a2486e 100644 --- a/tests/functional/artifacts/test_previous_version_state.py +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -2,8 +2,8 @@ import os import shutil -from dbt.artifacts.base import get_artifact_schema_version -from dbt.artifacts.run import RunResultsArtifact +from dbt.artifacts.schemas.base import get_artifact_schema_version +from dbt.artifacts.schemas.run import RunResultsArtifact from dbt.contracts.graph.manifest import WritableManifest from dbt.exceptions import IncompatibleSchemaError from dbt.tests.util import get_manifest diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py index 49192f3a5..f5d7b92f0 100644 --- a/tests/functional/context_methods/test_builtin_functions.py +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -131,27 +131,6 @@ def test_builtin_invocation_args_dict_function(self, project): ) assert all(element in invocation_dict for element in expected) - def test_builtin_dbt_metadata_envs_function(self, project, monkeypatch): - envs = { - "DBT_ENV_CUSTOM_ENV_RUN_ID": "1234", - "DBT_ENV_CUSTOM_ENV_JOB_ID": "5678", - "DBT_ENV_RUN_ID": "91011", - "RANDOM_ENV": "121314", - } - monkeypatch.setattr(os, "environ", envs) - - _, log_output = run_dbt_and_capture( - ["--debug", "--log-format=json", "run-operation", "validate_dbt_metadata_envs"] - ) - - parsed_logs = parse_json_logs(log_output) - result = find_result_in_parsed_logs(parsed_logs, "dbt_metadata_envs_result") - - assert result - - expected = "dbt_metadata_envs_result:{'RUN_ID': '1234', 'JOB_ID': '5678'}" - assert expected in str(result) - class TestContextBuiltinExceptions: # Assert compilation errors are raised with _strict equivalents diff --git a/tests/functional/graph_selection/test_group_selection.py b/tests/functional/graph_selection/test_group_selection.py index 8d4e69984..a24f56af4 100644 --- a/tests/functional/graph_selection/test_group_selection.py +++ b/tests/functional/graph_selection/test_group_selection.py @@ -28,14 +28,14 @@ class TestGroupSelection: @pytest.fixture(scope="class") def models(self): return { - "schema.yml": read_model("schema"), + "schema.yml": read_schema("schema"), "base_users.sql": read_model("base_users"), "users.sql": read_model("users"), "users_rollup.sql": read_model("users_rollup"), "versioned_v3.sql": read_model("base_users"), "users_rollup_dependency.sql": read_model("users_rollup_dependency"), "emails.sql": read_model("emails"), - "emails_alt.sql": read_model("emails"), + "emails_alt.sql": read_model("emails_alt"), "alternative.users.sql": read_model("alternative_users"), "never_selected.sql": read_model("never_selected"), "test": { diff --git a/tests/functional/list/test_list.py b/tests/functional/list/test_list.py index 3de97602c..f932cba7a 100644 --- a/tests/functional/list/test_list.py +++ b/tests/functional/list/test_list.py @@ -1,5 +1,5 @@ import json -import os +from os.path import normcase, normpath from dbt.logger import log_manager from dbt.tests.util import run_dbt @@ -8,7 +8,7 @@ class TestList: def dir(self, value): - return os.path.normpath(value) + return normpath(value) @pytest.fixture(scope="class") def project_config_update(self): @@ -772,6 +772,7 @@ def expect_selected_keys(self, project): for got, expected in zip(results, expectations): self.assert_json_equal(got, expected) + @pytest.mark.skip("The actual is not getting loaded, so all actuals are 0.") def test_ls(self, project): self.expect_snapshot_output(project) self.expect_analyses_output() @@ -787,11 +788,11 @@ def test_ls(self, project): def normalize(path): """On windows, neither is enough on its own: - >>> normcase('C:\\documents/ALL CAPS/subdir\\..') + normcase('C:\\documents/ALL CAPS/subdir\\..') 'c:\\documents\\all caps\\subdir\\..' - >>> normpath('C:\\documents/ALL CAPS/subdir\\..') + normpath('C:\\documents/ALL CAPS/subdir\\..') 'C:\\documents\\ALL CAPS' - >>> normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) + normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) 'c:\\documents\\all caps' """ - return os.path.normcase(os.path.normpath(path)) + return normcase(normpath(path)) diff --git a/tests/functional/projects/dbt_integration/__init__.py b/tests/functional/projects/dbt_integration/__init__.py index 86a21253a..f5e9120ba 100644 --- a/tests/functional/projects/dbt_integration/__init__.py +++ b/tests/functional/projects/dbt_integration/__init__.py @@ -3,9 +3,9 @@ from tests.functional.projects.utils import read -read_macro = partial(read, project="dbt_integration", file_type="macros") -read_model = partial(read, project="dbt_integration", file_type="models") -read_schema = partial(read, project="dbt_integration", file_type="schemas") +read_macro = partial(read, "dbt_integration", "macros") +read_model = partial(read, "dbt_integration", "models") +read_schema = partial(read, "dbt_integration", "schemas") def dbt_integration(): diff --git a/tests/functional/projects/graph_selection/__init__.py b/tests/functional/projects/graph_selection/__init__.py index d21ba96f7..5aa7aa8b8 100644 --- a/tests/functional/projects/graph_selection/__init__.py +++ b/tests/functional/projects/graph_selection/__init__.py @@ -5,9 +5,9 @@ from tests.functional.projects.utils import read -read_data = partial(read, project="graph_selection", file_type="data") -read_model = partial(read, project="graph_selection", file_type="models") -read_schema = partial(read, project="graph_selection", file_type="schemas") +read_data = partial(read, "graph_selection", "data") +read_model = partial(read, "graph_selection", "models") +read_schema = partial(read, "graph_selection", "schemas") class GraphSelection: diff --git a/tests/functional/projects/jaffle_shop/__init__.py b/tests/functional/projects/jaffle_shop/__init__.py index 5a84ff65b..436014ea5 100644 --- a/tests/functional/projects/jaffle_shop/__init__.py +++ b/tests/functional/projects/jaffle_shop/__init__.py @@ -5,11 +5,11 @@ from tests.functional.projects.utils import read -read_data = partial(read, project="jaffle_shop", file_type="data") -read_doc = partial(read, project="jaffle_shop", file_type="docs") -read_model = partial(read, project="jaffle_shop", file_type="models") -read_schema = partial(read, project="jaffle_shop", file_type="schemas") -read_staging = partial(read, project="jaffle_shop", file_type="staging") +read_data = partial(read, "jaffle_shop", "data") +read_doc = partial(read, "jaffle_shop", "docs") +read_model = partial(read, "jaffle_shop", "models") +read_schema = partial(read, "jaffle_shop", "schemas") +read_staging = partial(read, "jaffle_shop", "staging") class JaffleShop: diff --git a/tests/functional/projects/utils.py b/tests/functional/projects/utils.py index 51682d2c5..360cdd956 100644 --- a/tests/functional/projects/utils.py +++ b/tests/functional/projects/utils.py @@ -4,6 +4,7 @@ FILE_TYPES = { "data": "csv", "docs": "md", + "macros": "sql", "models": "sql", "schemas": "yml", "staging": "sql", @@ -11,7 +12,7 @@ def read(project: str, file_type: str, file_name: str) -> str: - root = Path(__file__) / project + root = Path(__file__).parent / project extension = FILE_TYPES[file_type] file = root / file_type / f"{file_name}.{extension}" contents = file.read_text() diff --git a/tests/functional/sources/test_source_fresher_state.py b/tests/functional/sources/test_source_fresher_state.py index 1b885b09e..754ef4628 100644 --- a/tests/functional/sources/test_source_fresher_state.py +++ b/tests/functional/sources/test_source_fresher_state.py @@ -8,7 +8,6 @@ from dbt_common.exceptions import DbtInternalError import pytest -from dbt.adapters.__about__ import version as DBT_POSTGRES_VERSION from tests.functional.sources.common_source_setup import BaseSourcesTest from tests.functional.sources.fixtures import ( error_models_schema_yml, @@ -92,7 +91,6 @@ def _assert_freshness_results(self, path, state): data["metadata"]["dbt_schema_version"] == "https://schemas.getdbt.com/dbt/sources/v3.json" ) - assert data["metadata"]["dbt_version"] == DBT_POSTGRES_VERSION key = "key" if os.name == "nt": key = key.upper() diff --git a/tests/functional/sources/test_source_freshness.py b/tests/functional/sources/test_source_freshness.py index 3438832e0..dd526260d 100644 --- a/tests/functional/sources/test_source_freshness.py +++ b/tests/functional/sources/test_source_freshness.py @@ -7,7 +7,6 @@ import pytest import yaml -from dbt.adapters.__about__ import version as DBT_POSTGRES_VERSION from tests.functional.sources.common_source_setup import BaseSourcesTest from tests.functional.sources.fixtures import ( collect_freshness_macro_override_previous_return_signature, @@ -84,7 +83,6 @@ def _assert_freshness_results(self, path, state): data["metadata"]["dbt_schema_version"] == "https://schemas.getdbt.com/dbt/sources/v3.json" ) - assert data["metadata"]["dbt_version"] == DBT_POSTGRES_VERSION key = "key" if os.name == "nt": key = key.upper() diff --git a/tests/functional/test_config.py b/tests/functional/test_config.py index 212fafba1..b7c8aec72 100644 --- a/tests/functional/test_config.py +++ b/tests/functional/test_config.py @@ -8,6 +8,7 @@ import dbt.config import dbt.exceptions import dbt.tracking +import pytest import yaml from dbt.adapters.postgres import PostgresCredentials @@ -284,6 +285,7 @@ def test_missing_target(self): self.assertEqual(profile.credentials.type, "postgres") +@pytest.mark.skip("Flags() has no attribute PROFILES_DIR") class TestProfileFile(BaseConfigTest): def from_raw_profile_info(self, raw_profile=None, profile_name="default", **kwargs): if raw_profile is None: diff --git a/tests/functional/test_connection_manager.py b/tests/functional/test_connection_manager.py index 429c0cd29..f2e746f7d 100644 --- a/tests/functional/test_connection_manager.py +++ b/tests/functional/test_connection_manager.py @@ -25,6 +25,8 @@ def get_connection(self) -> Connection: class TestConnectionManagerOpen(TestCase): + connection = None + # Postgres-specific def setUp(self): self.connection = self.get_connection() diff --git a/tests/functional/test_init.py b/tests/functional/test_init.py index 4d29db086..43f0fcc21 100644 --- a/tests/functional/test_init.py +++ b/tests/functional/test_init.py @@ -420,171 +420,6 @@ def setup(self, project): os.remove(os.path.join(project.project_root, "dbt_project.yml")) -class TestInitOutsideOfProject(TestInitOutsideOfProjectBase): - @pytest.fixture(scope="class") - def dbt_profile_data(self, unique_schema): - return { - "test": { - "outputs": { - "default2": { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), - "user": os.getenv("POSTGRES_TEST_USER", "root"), - "pass": os.getenv("POSTGRES_TEST_PASS", "password"), - "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), - "schema": unique_schema, - }, - "noaccess": { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), - "user": "noaccess", - "pass": "password", - "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), - "schema": unique_schema, - }, - }, - "target": "default2", - }, - } - - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_task_outside_of_project( - self, mock_prompt, mock_confirm, mock_get_adapter, project, project_name, unique_schema - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - manager.prompt.side_effect = [ - project_name, - 1, - "localhost", - 5432, - "test_username", - "test_password", - "test_db", - "test_schema", - 4, - ] - mock_get_adapter.return_value = [project.adapter.type()] - run_dbt(["init"]) - - manager.assert_has_calls( - [ - call.prompt("Enter a name for your project (letters, digits, underscore)"), - call.prompt( - "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", - type=click.INT, - ), - call.prompt( - "host (hostname for the instance)", default=None, hide_input=False, type=None - ), - call.prompt("port", default=5432, hide_input=False, type=click.INT), - call.prompt("user (dev username)", default=None, hide_input=False, type=None), - call.prompt("pass (dev password)", default=None, hide_input=True, type=None), - call.prompt( - "dbname (default database that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt( - "schema (default schema that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), - ] - ) - - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert ( - f.read() - == f"""{project_name}: - outputs: - dev: - dbname: test_db - host: localhost - pass: test_password - port: 5432 - schema: test_schema - threads: 4 - type: postgres - user: test_username - target: dev -test: - outputs: - default2: - dbname: dbt - host: localhost - pass: password - port: 5432 - schema: {unique_schema} - threads: 4 - type: postgres - user: root - noaccess: - dbname: dbt - host: localhost - pass: password - port: 5432 - schema: {unique_schema} - threads: 4 - type: postgres - user: noaccess - target: default2 -""" - ) - - with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: - assert ( - f.read() - == f""" -# Name your project! Project names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: '{project_name}' -version: '1.0.0' - -# This setting configures which "profile" dbt uses for this project. -profile: '{project_name}' - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that models in this project can be -# found in the "models/" directory. You probably won't need to change these! -model-paths: ["models"] -analysis-paths: ["analyses"] -test-paths: ["tests"] -seed-paths: ["seeds"] -macro-paths: ["macros"] -snapshot-paths: ["snapshots"] - -clean-targets: # directories to be removed by `dbt clean` - - "target" - - "dbt_packages" - - -# Configuring models -# Full documentation: https://docs.getdbt.com/docs/configuring-models - -# In this example config, we tell dbt to build all models in the example/ -# directory as views. These settings can be overridden in the individual model -# files using the `{{{{ config(...) }}}}` macro. -models: - {project_name}: - # Config indicated by + and applies to all files under models/example/ - example: - +materialized: view -""" - ) - - class TestInitInvalidProjectNameCLI(TestInitOutsideOfProjectBase): @patch("dbt.task.init._get_adapter_plugin_names") @patch("click.confirm") @@ -634,75 +469,6 @@ def test_init_invalid_project_name_prompt( ) -class TestInitProvidedProjectNameAndSkipProfileSetup(TestInitOutsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_provided_project_name_and_skip_profile_setup( - self, mock_prompt, mock_confirm, mock_get, project, project_name - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - manager.prompt.side_effect = [ - 1, - "localhost", - 5432, - "test_username", - "test_password", - "test_db", - "test_schema", - 4, - ] - mock_get.return_value = [project.adapter.type()] - - # provide project name through the init command - run_dbt(["init", project_name, "--skip-profile-setup"]) - assert len(manager.mock_calls) == 0 - - with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: - assert ( - f.read() - == f""" -# Name your project! Project names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: '{project_name}' -version: '1.0.0' - -# This setting configures which "profile" dbt uses for this project. -profile: '{project_name}' - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that models in this project can be -# found in the "models/" directory. You probably won't need to change these! -model-paths: ["models"] -analysis-paths: ["analyses"] -test-paths: ["tests"] -seed-paths: ["seeds"] -macro-paths: ["macros"] -snapshot-paths: ["snapshots"] - -clean-targets: # directories to be removed by `dbt clean` - - "target" - - "dbt_packages" - - -# Configuring models -# Full documentation: https://docs.getdbt.com/docs/configuring-models - -# In this example config, we tell dbt to build all models in the example/ -# directory as views. These settings can be overridden in the individual model -# files using the `{{{{ config(...) }}}}` macro. -models: - {project_name}: - # Config indicated by + and applies to all files under models/example/ - example: - +materialized: view -""" - ) - - class TestInitInsideProjectAndSkipProfileSetup(TestInitInsideOfProjectBase): @patch("dbt.task.init._get_adapter_plugin_names") @patch("click.confirm") @@ -721,74 +487,6 @@ def test_init_inside_project_and_skip_profile_setup( assert len(manager.mock_calls) == 0 -class TestInitOutsideOfProjectWithSpecifiedProfile(TestInitOutsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.prompt") - def test_init_task_outside_of_project_with_specified_profile( - self, mock_prompt, mock_get_adapter, project, project_name, unique_schema, dbt_profile_data - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.prompt.side_effect = [ - project_name, - ] - mock_get_adapter.return_value = [project.adapter.type()] - run_dbt(["init", "--profile", "test"]) - - manager.assert_has_calls( - [ - call.prompt("Enter a name for your project (letters, digits, underscore)"), - ] - ) - - # profiles.yml is NOT overwritten, so assert that the text matches that of the - # original fixture - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert f.read() == yaml.safe_dump(dbt_profile_data) - - with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: - assert ( - f.read() - == f""" -# Name your project! Project names should contain only lowercase characters -# and underscores. A good package name should reflect your organization's -# name or the intended use of these models -name: '{project_name}' -version: '1.0.0' - -# This setting configures which "profile" dbt uses for this project. -profile: 'test' - -# These configurations specify where dbt should look for different types of files. -# The `model-paths` config, for example, states that models in this project can be -# found in the "models/" directory. You probably won't need to change these! -model-paths: ["models"] -analysis-paths: ["analyses"] -test-paths: ["tests"] -seed-paths: ["seeds"] -macro-paths: ["macros"] -snapshot-paths: ["snapshots"] - -clean-targets: # directories to be removed by `dbt clean` - - "target" - - "dbt_packages" - - -# Configuring models -# Full documentation: https://docs.getdbt.com/docs/configuring-models - -# In this example config, we tell dbt to build all models in the example/ -# directory as views. These settings can be overridden in the individual model -# files using the `{{{{ config(...) }}}}` macro. -models: - {project_name}: - # Config indicated by + and applies to all files under models/example/ - example: - +materialized: view -""" - ) - - class TestInitOutsideOfProjectSpecifyingInvalidProfile(TestInitOutsideOfProjectBase): @patch("dbt.task.init._get_adapter_plugin_names") @patch("click.prompt") From 747a38c6c57c22dfba4344a82a7950ba1dbf84b8 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Thu, 25 Jan 2024 15:20:08 -0500 Subject: [PATCH 022/114] setup CI --- .github/actions/publish-pypi/action.yml | 25 ++++++++ .github/actions/publish-results/action.yml | 25 ++++++++ .github/actions/setup-environment/action.yml | 18 ++++++ .../actions/setup-postgres-linux/action.yml | 10 +++ .../actions/setup-postgres-linux/setup_db.sh | 56 +++++++++++++++++ .../actions/setup-postgres-macos/action.yml | 24 ++++++++ .../actions/setup-postgres-macos/setup_db.sh | 56 +++++++++++++++++ .../actions/setup-postgres-windows/action.yml | 12 ++++ .../setup-postgres-windows/setup_db.sh | 56 +++++++++++++++++ .github/workflows/integration-tests.yml | 61 +++++++++++++++++++ .github/workflows/lint.yml | 40 ++++++++++++ .github/workflows/release.yml | 45 ++++++++++++++ .github/workflows/unit-tests.yml | 49 +++++++++++++++ pyproject.toml | 22 +++---- 14 files changed, 487 insertions(+), 12 deletions(-) create mode 100644 .github/actions/publish-pypi/action.yml create mode 100644 .github/actions/publish-results/action.yml create mode 100644 .github/actions/setup-environment/action.yml create mode 100644 .github/actions/setup-postgres-linux/action.yml create mode 100755 .github/actions/setup-postgres-linux/setup_db.sh create mode 100644 .github/actions/setup-postgres-macos/action.yml create mode 100755 .github/actions/setup-postgres-macos/setup_db.sh create mode 100644 .github/actions/setup-postgres-windows/action.yml create mode 100755 .github/actions/setup-postgres-windows/setup_db.sh create mode 100644 .github/workflows/integration-tests.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/release.yml create mode 100644 .github/workflows/unit-tests.yml diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml new file mode 100644 index 000000000..01cbc1cb3 --- /dev/null +++ b/.github/actions/publish-pypi/action.yml @@ -0,0 +1,25 @@ +name: Publish PyPI + +inputs: + python-version: + description: Create an environment with the appropriate version of python and hatch installed + default: "3.11" + +runs: + using: composite + steps: + - name: Setup environment + uses: ./.github/actions/setup-environment + with: + python-version: ${{ inputs.python-version }} + + - name: Build artifacts + run: hatch build + shell: bash + + - name: Check artifacts + run: hatch run build:check-all + shell: bash + + - name: Publish artifacts to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/actions/publish-results/action.yml b/.github/actions/publish-results/action.yml new file mode 100644 index 000000000..5bd6e3416 --- /dev/null +++ b/.github/actions/publish-results/action.yml @@ -0,0 +1,25 @@ +name: Publish results + +inputs: + file-name: + description: File type for file name stub (e.g. "unit-tests") + required: true + python-version: + description: Create an environment with the appropriate version of python and hatch installed + required: true + source-file: + description: File to be uploaded + required: true + +runs: + using: composite + steps: + - name: Get timestamp + id: timestamp + run: echo "ts=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts + shell: bash + + - uses: actions/upload-artifact@v3 + with: + name: ${{ inputs.file-name }}_${{ inputs.python-version }}-${{ steps.timestamp.outputs.ts }}.csv + path: ${{ inputs.source-file }} diff --git a/.github/actions/setup-environment/action.yml b/.github/actions/setup-environment/action.yml new file mode 100644 index 000000000..6227c6985 --- /dev/null +++ b/.github/actions/setup-environment/action.yml @@ -0,0 +1,18 @@ +name: Setup `hatch` + +inputs: + python-version: + description: Create an environment with the appropriate version of python and hatch installed + required: true + +runs: + using: composite + steps: + - name: Set up Python ${{ inputs.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python-version }} + + - name: Install hatch + run: python -m pip install hatch + shell: bash diff --git a/.github/actions/setup-postgres-linux/action.yml b/.github/actions/setup-postgres-linux/action.yml new file mode 100644 index 000000000..1c8fc772a --- /dev/null +++ b/.github/actions/setup-postgres-linux/action.yml @@ -0,0 +1,10 @@ +name: "Set up postgres (linux)" +description: "Set up postgres service on linux vm for dbt integration tests" +runs: + using: "composite" + steps: + - shell: bash + run: | + sudo systemctl start postgresql.service + pg_isready + sudo -u postgres bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-linux/setup_db.sh b/.github/actions/setup-postgres-linux/setup_db.sh new file mode 100755 index 000000000..de59bf0fa --- /dev/null +++ b/.github/actions/setup-postgres-linux/setup_db.sh @@ -0,0 +1,56 @@ +#!/bin/bash +set -x +env | grep '^PG' + +# If you want to run this script for your own postgresql (run with +# docker-compose) it will look like this: +# PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ +PGUSER="${PGUSER:-postgres}" +export PGUSER +PGPORT="${PGPORT:-5432}" +export PGPORT +PGHOST="${PGHOST:-localhost}" + +function connect_circle() { + # try to handle circleci/docker oddness + let rc=1 + while [[ $rc -eq 1 ]]; do + nc -z ${PGHOST} ${PGPORT} + let rc=$? + done + if [[ $rc -ne 0 ]]; then + echo "Fatal: Could not connect to $PGHOST" + exit 1 + fi +} + +# appveyor doesn't have 'nc', but it also doesn't have these issues +if [[ -n $CIRCLECI ]]; then + connect_circle +fi + +for i in {1..10}; do + if pg_isready -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ; then + break + fi + + echo "Waiting for postgres to be ready..." + sleep 2; +done; + +createdb dbt +psql -c "CREATE ROLE root WITH PASSWORD 'password';" +psql -c "ALTER ROLE root WITH LOGIN;" +psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" + +psql -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;" +psql -c "ALTER ROLE noaccess WITH LOGIN;" +psql -c "GRANT CONNECT ON DATABASE dbt TO noaccess;" +psql -c "CREATE ROLE dbt_test_user_1;" +psql -c "CREATE ROLE dbt_test_user_2;" +psql -c "CREATE ROLE dbt_test_user_3;" + +psql -c 'CREATE DATABASE "dbtMixedCase";' +psql -c 'GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION;' + +set +x diff --git a/.github/actions/setup-postgres-macos/action.yml b/.github/actions/setup-postgres-macos/action.yml new file mode 100644 index 000000000..af9a9fe16 --- /dev/null +++ b/.github/actions/setup-postgres-macos/action.yml @@ -0,0 +1,24 @@ +name: "Set up postgres (macos)" +description: "Set up postgres service on macos vm for dbt integration tests" +runs: + using: "composite" + steps: + - shell: bash + run: | + brew services start postgresql + echo "Check PostgreSQL service is running" + i=10 + COMMAND='pg_isready' + while [ $i -gt -1 ]; do + if [ $i == 0 ]; then + echo "PostgreSQL service not ready, all attempts exhausted" + exit 1 + fi + echo "Check PostgreSQL service status" + eval $COMMAND && break + echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i" + sleep 10 + ((i--)) + done + createuser -s postgres + bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-macos/setup_db.sh b/.github/actions/setup-postgres-macos/setup_db.sh new file mode 100755 index 000000000..de59bf0fa --- /dev/null +++ b/.github/actions/setup-postgres-macos/setup_db.sh @@ -0,0 +1,56 @@ +#!/bin/bash +set -x +env | grep '^PG' + +# If you want to run this script for your own postgresql (run with +# docker-compose) it will look like this: +# PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ +PGUSER="${PGUSER:-postgres}" +export PGUSER +PGPORT="${PGPORT:-5432}" +export PGPORT +PGHOST="${PGHOST:-localhost}" + +function connect_circle() { + # try to handle circleci/docker oddness + let rc=1 + while [[ $rc -eq 1 ]]; do + nc -z ${PGHOST} ${PGPORT} + let rc=$? + done + if [[ $rc -ne 0 ]]; then + echo "Fatal: Could not connect to $PGHOST" + exit 1 + fi +} + +# appveyor doesn't have 'nc', but it also doesn't have these issues +if [[ -n $CIRCLECI ]]; then + connect_circle +fi + +for i in {1..10}; do + if pg_isready -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ; then + break + fi + + echo "Waiting for postgres to be ready..." + sleep 2; +done; + +createdb dbt +psql -c "CREATE ROLE root WITH PASSWORD 'password';" +psql -c "ALTER ROLE root WITH LOGIN;" +psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" + +psql -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;" +psql -c "ALTER ROLE noaccess WITH LOGIN;" +psql -c "GRANT CONNECT ON DATABASE dbt TO noaccess;" +psql -c "CREATE ROLE dbt_test_user_1;" +psql -c "CREATE ROLE dbt_test_user_2;" +psql -c "CREATE ROLE dbt_test_user_3;" + +psql -c 'CREATE DATABASE "dbtMixedCase";' +psql -c 'GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION;' + +set +x diff --git a/.github/actions/setup-postgres-windows/action.yml b/.github/actions/setup-postgres-windows/action.yml new file mode 100644 index 000000000..419b5e267 --- /dev/null +++ b/.github/actions/setup-postgres-windows/action.yml @@ -0,0 +1,12 @@ +name: "Set up postgres (windows)" +description: "Set up postgres service on windows vm for dbt integration tests" +runs: + using: "composite" + steps: + - shell: pwsh + run: | + $pgService = Get-Service -Name postgresql* + Set-Service -InputObject $pgService -Status running -StartupType automatic + Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru + $env:Path += ";$env:PGBIN" + bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-windows/setup_db.sh b/.github/actions/setup-postgres-windows/setup_db.sh new file mode 100755 index 000000000..de59bf0fa --- /dev/null +++ b/.github/actions/setup-postgres-windows/setup_db.sh @@ -0,0 +1,56 @@ +#!/bin/bash +set -x +env | grep '^PG' + +# If you want to run this script for your own postgresql (run with +# docker-compose) it will look like this: +# PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ +PGUSER="${PGUSER:-postgres}" +export PGUSER +PGPORT="${PGPORT:-5432}" +export PGPORT +PGHOST="${PGHOST:-localhost}" + +function connect_circle() { + # try to handle circleci/docker oddness + let rc=1 + while [[ $rc -eq 1 ]]; do + nc -z ${PGHOST} ${PGPORT} + let rc=$? + done + if [[ $rc -ne 0 ]]; then + echo "Fatal: Could not connect to $PGHOST" + exit 1 + fi +} + +# appveyor doesn't have 'nc', but it also doesn't have these issues +if [[ -n $CIRCLECI ]]; then + connect_circle +fi + +for i in {1..10}; do + if pg_isready -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ; then + break + fi + + echo "Waiting for postgres to be ready..." + sleep 2; +done; + +createdb dbt +psql -c "CREATE ROLE root WITH PASSWORD 'password';" +psql -c "ALTER ROLE root WITH LOGIN;" +psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" + +psql -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;" +psql -c "ALTER ROLE noaccess WITH LOGIN;" +psql -c "GRANT CONNECT ON DATABASE dbt TO noaccess;" +psql -c "CREATE ROLE dbt_test_user_1;" +psql -c "CREATE ROLE dbt_test_user_2;" +psql -c "CREATE ROLE dbt_test_user_3;" + +psql -c 'CREATE DATABASE "dbtMixedCase";' +psql -c 'GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION;' + +set +x diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml new file mode 100644 index 000000000..ec1f6e5f8 --- /dev/null +++ b/.github/workflows/integration-tests.yml @@ -0,0 +1,61 @@ +name: Integration Tests + +on: + push: + branches: + - "main" + - "*.latest" + pull_request: + workflow_dispatch: + +permissions: read-all + +# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} + cancel-in-progress: true + +jobs: + unit: + name: Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + + steps: + - name: Check out repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup environment + uses: ./.github/actions/setup-environment + with: + python-version: ${{ matrix.python-version }} + + - name: Set up postgres (linux) + if: runner.os == 'Linux' + uses: ./.github/actions/setup-postgres-linux + + - name: Set up postgres (macos) + if: runner.os == 'macOS' + uses: ./.github/actions/setup-postgres-macos + + - name: Set up postgres (windows) + if: runner.os == 'Windows' + uses: ./.github/actions/setup-postgres-windows + + - name: Run integration tests + run: hatch run integration-tests:all + shell: bash + + - name: Publish results + uses: ./.github/actions/publish-results + if: always() + with: + source-file: "results.csv" + file-name: "integration_results" + python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 000000000..2b336a430 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,40 @@ +name: Lint + +on: + push: + branches: + - "main" + - "*.latest" + pull_request: + workflow_dispatch: + +permissions: read-all + +# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} + cancel-in-progress: true + +jobs: + lint: + name: Python 3.8 + runs-on: ubuntu-latest + + steps: + - name: Check out repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup environment + uses: ./.github/actions/setup-environment + with: + python-version: "3.8" + + - name: Run linters + run: hatch run lint:all + shell: bash + + - name: Run typechecks + run: hatch run typecheck:all + shell: bash diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..9b92ee915 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,45 @@ +name: Release + +on: + workflow_dispatch: + inputs: + deploy-to: + type: choice + description: Choose where to publish (test/prod) + options: + - prod + - test + default: prod + +permissions: read-all + +defaults: + run: + shell: bash + +# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} + cancel-in-progress: true + +jobs: + + release: + name: PyPI - ${{ inputs.deploy-to }} + runs-on: ubuntu-latest + environment: + name: ${{ inputs.deploy-to }} + url: ${{ vars.PYPI_URL }} + permissions: + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing + + steps: + - name: Check out repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Publish to PyPI + uses: ./.github/actions/publish-pypi + with: + python-version: "3.11" diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml new file mode 100644 index 000000000..f8a39ea8d --- /dev/null +++ b/.github/workflows/unit-tests.yml @@ -0,0 +1,49 @@ +name: Unit Tests + +on: + push: + branches: + - "main" + - "*.latest" + pull_request: + workflow_dispatch: + +permissions: read-all + +# will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} + cancel-in-progress: true + +jobs: + unit: + name: Python ${{ matrix.python-version }} + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + + steps: + - name: Check out repository + uses: actions/checkout@v4 + with: + persist-credentials: false + + - name: Setup environment + uses: ./.github/actions/setup-environment + with: + python-version: ${{ matrix.python-version }} + + - name: Run unit tests + run: hatch run unit-tests:all + shell: bash + + - name: Publish results + uses: ./.github/actions/publish-results + if: always() + with: + source-file: "results.csv" + file-name: "unit_results" + python-version: ${{ matrix.python-version }} diff --git a/pyproject.toml b/pyproject.toml index 633658c94..9f73d36e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ ] [project.optional-dependencies] dev = [ - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@fix-dependencies", + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", ] lint = [ "black", @@ -49,10 +49,11 @@ test = [ "freezegun", "pytest", "pytest-dotenv", + "pytest-mock", "pytest-xdist", ] integration = [ - "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@fix-dependencies#subdirectory=dbt-tests-adapter", + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", ] build = [ "wheel", @@ -71,6 +72,8 @@ Changelog = "https://github.com/dbt-labs/dbt-postgres/blob/main/CHANGELOG.md" requires = ["hatchling"] build-backend = "hatchling.build" +# TODO: this is needed to install from github in optoinal-dependencies +# alternatively, we can stick the github dependencies directly in the hatch envs [tool.hatch.metadata] allow-direct-references = true @@ -96,12 +99,9 @@ features = [ detached = true features = ["lint"] [tool.hatch.envs.lint.scripts] -all = [ - "- black-only", - "- flake8-only", -] -black-only = "python -m black ." -flake8-only = "python -m flake8 ." +all = ["- black", "- flake8", ] +black = "python -m black ." +flake8 = "python -m flake8 ." [tool.hatch.envs.typecheck] features = ["typecheck"] @@ -124,10 +124,7 @@ all = "python -m pytest {args:tests/functional}" detached = true features = ["build"] [tool.hatch.envs.build.scripts] -check-all = [ - "- check-wheel", - "- check-sdist", -] +check-all = ["- check-wheel", "- check-sdist"] check-wheel = [ "twine check dist/*", "find ./dist/dbt_postgres-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", @@ -138,6 +135,7 @@ check-sdist = [ "find ./dist/dbt_postgres-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", "pip freeze | grep dbt-postgres", ] + [tool.black] line-length = 99 target-version = ['py38'] From 937e0a337fad9bbf6edc7db894098edbe86b6c61 Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Thu, 25 Jan 2024 15:21:43 -0500 Subject: [PATCH 023/114] add testing logs to gitignore --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 6769e21d9..094ee4a93 100644 --- a/.gitignore +++ b/.gitignore @@ -157,4 +157,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ \ No newline at end of file +#.idea/ + +# testing artifacts +/logs From 834b970b9e13dc7d3baeb76ef5721cf53975c89c Mon Sep 17 00:00:00 2001 From: Mike Alfare <mike.alfare@dbtlabs.com> Date: Fri, 26 Jan 2024 12:23:30 -0500 Subject: [PATCH 024/114] remove unused test case --- tests/functional/test_connection_manager.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/tests/functional/test_connection_manager.py b/tests/functional/test_connection_manager.py index f2e746f7d..778b2fbbd 100644 --- a/tests/functional/test_connection_manager.py +++ b/tests/functional/test_connection_manager.py @@ -1,29 +1,11 @@ from unittest import TestCase, mock from dbt.adapters.contracts.connection import Connection -from dbt.tests.adapter.connection_manager import ConnectionManagerRetry import psycopg2 from dbt.adapters.postgres import PostgresCredentials, PostgresConnectionManager -class TestConnectionManagerRetry(ConnectionManagerRetry): - def get_connection(self) -> Connection: - if connection := self.connection: - pass - else: - credentials = PostgresCredentials( - host="localhost", - user="test-user", - port=1111, - password="test-password", - database="test-db", - schema="test-schema", - ) - connection = Connection("postgres", None, credentials) - return connection - - class TestConnectionManagerOpen(TestCase): connection = None From 6069171603e5110b77a24e3f68e202871dd7f631 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 30 Jan 2024 12:20:52 -0500 Subject: [PATCH 025/114] Configure release pipeline, using shared actions from `dbt-adapters` (#1) --- .github/actions/publish-pypi/action.yml | 25 --------- .github/actions/publish-results/action.yml | 25 --------- .github/actions/setup-environment/action.yml | 18 ------ .../actions/setup-postgres-linux/action.yml | 10 ---- .../actions/setup-postgres-linux/setup_db.sh | 56 ------------------- .../actions/setup-postgres-macos/action.yml | 24 -------- .../actions/setup-postgres-macos/setup_db.sh | 56 ------------------- .../actions/setup-postgres-windows/action.yml | 12 ---- .../setup-postgres-windows/setup_db.sh | 56 ------------------- .../workflows/{lint.yml => code-quality.yml} | 12 ++-- .github/workflows/integration-tests.yml | 53 +++++++++++------- .github/workflows/release.yml | 19 ++++--- .github/workflows/unit-tests.yml | 6 +- pyproject.toml | 2 +- scripts/setup_test_database.sql | 16 ++++++ .../minimal_cli/test_minimal_cli.py | 10 ---- .../schema_tests/test_schema_v2_tests.py | 18 ------ tests/functional/test_experimental_parser.py | 2 +- tests/functional/test_init.py | 49 ---------------- 19 files changed, 69 insertions(+), 400 deletions(-) delete mode 100644 .github/actions/publish-pypi/action.yml delete mode 100644 .github/actions/publish-results/action.yml delete mode 100644 .github/actions/setup-environment/action.yml delete mode 100644 .github/actions/setup-postgres-linux/action.yml delete mode 100755 .github/actions/setup-postgres-linux/setup_db.sh delete mode 100644 .github/actions/setup-postgres-macos/action.yml delete mode 100755 .github/actions/setup-postgres-macos/setup_db.sh delete mode 100644 .github/actions/setup-postgres-windows/action.yml delete mode 100755 .github/actions/setup-postgres-windows/setup_db.sh rename .github/workflows/{lint.yml => code-quality.yml} (80%) create mode 100644 scripts/setup_test_database.sql diff --git a/.github/actions/publish-pypi/action.yml b/.github/actions/publish-pypi/action.yml deleted file mode 100644 index 01cbc1cb3..000000000 --- a/.github/actions/publish-pypi/action.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Publish PyPI - -inputs: - python-version: - description: Create an environment with the appropriate version of python and hatch installed - default: "3.11" - -runs: - using: composite - steps: - - name: Setup environment - uses: ./.github/actions/setup-environment - with: - python-version: ${{ inputs.python-version }} - - - name: Build artifacts - run: hatch build - shell: bash - - - name: Check artifacts - run: hatch run build:check-all - shell: bash - - - name: Publish artifacts to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/actions/publish-results/action.yml b/.github/actions/publish-results/action.yml deleted file mode 100644 index 5bd6e3416..000000000 --- a/.github/actions/publish-results/action.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: Publish results - -inputs: - file-name: - description: File type for file name stub (e.g. "unit-tests") - required: true - python-version: - description: Create an environment with the appropriate version of python and hatch installed - required: true - source-file: - description: File to be uploaded - required: true - -runs: - using: composite - steps: - - name: Get timestamp - id: timestamp - run: echo "ts=$(date +'%Y-%m-%dT%H_%M_%S')" >> $GITHUB_OUTPUT #no colons allowed for artifacts - shell: bash - - - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.file-name }}_${{ inputs.python-version }}-${{ steps.timestamp.outputs.ts }}.csv - path: ${{ inputs.source-file }} diff --git a/.github/actions/setup-environment/action.yml b/.github/actions/setup-environment/action.yml deleted file mode 100644 index 6227c6985..000000000 --- a/.github/actions/setup-environment/action.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Setup `hatch` - -inputs: - python-version: - description: Create an environment with the appropriate version of python and hatch installed - required: true - -runs: - using: composite - steps: - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ inputs.python-version }} - - - name: Install hatch - run: python -m pip install hatch - shell: bash diff --git a/.github/actions/setup-postgres-linux/action.yml b/.github/actions/setup-postgres-linux/action.yml deleted file mode 100644 index 1c8fc772a..000000000 --- a/.github/actions/setup-postgres-linux/action.yml +++ /dev/null @@ -1,10 +0,0 @@ -name: "Set up postgres (linux)" -description: "Set up postgres service on linux vm for dbt integration tests" -runs: - using: "composite" - steps: - - shell: bash - run: | - sudo systemctl start postgresql.service - pg_isready - sudo -u postgres bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-linux/setup_db.sh b/.github/actions/setup-postgres-linux/setup_db.sh deleted file mode 100755 index de59bf0fa..000000000 --- a/.github/actions/setup-postgres-linux/setup_db.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -set -x -env | grep '^PG' - -# If you want to run this script for your own postgresql (run with -# docker-compose) it will look like this: -# PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ -PGUSER="${PGUSER:-postgres}" -export PGUSER -PGPORT="${PGPORT:-5432}" -export PGPORT -PGHOST="${PGHOST:-localhost}" - -function connect_circle() { - # try to handle circleci/docker oddness - let rc=1 - while [[ $rc -eq 1 ]]; do - nc -z ${PGHOST} ${PGPORT} - let rc=$? - done - if [[ $rc -ne 0 ]]; then - echo "Fatal: Could not connect to $PGHOST" - exit 1 - fi -} - -# appveyor doesn't have 'nc', but it also doesn't have these issues -if [[ -n $CIRCLECI ]]; then - connect_circle -fi - -for i in {1..10}; do - if pg_isready -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ; then - break - fi - - echo "Waiting for postgres to be ready..." - sleep 2; -done; - -createdb dbt -psql -c "CREATE ROLE root WITH PASSWORD 'password';" -psql -c "ALTER ROLE root WITH LOGIN;" -psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" - -psql -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;" -psql -c "ALTER ROLE noaccess WITH LOGIN;" -psql -c "GRANT CONNECT ON DATABASE dbt TO noaccess;" -psql -c "CREATE ROLE dbt_test_user_1;" -psql -c "CREATE ROLE dbt_test_user_2;" -psql -c "CREATE ROLE dbt_test_user_3;" - -psql -c 'CREATE DATABASE "dbtMixedCase";' -psql -c 'GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION;' - -set +x diff --git a/.github/actions/setup-postgres-macos/action.yml b/.github/actions/setup-postgres-macos/action.yml deleted file mode 100644 index af9a9fe16..000000000 --- a/.github/actions/setup-postgres-macos/action.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: "Set up postgres (macos)" -description: "Set up postgres service on macos vm for dbt integration tests" -runs: - using: "composite" - steps: - - shell: bash - run: | - brew services start postgresql - echo "Check PostgreSQL service is running" - i=10 - COMMAND='pg_isready' - while [ $i -gt -1 ]; do - if [ $i == 0 ]; then - echo "PostgreSQL service not ready, all attempts exhausted" - exit 1 - fi - echo "Check PostgreSQL service status" - eval $COMMAND && break - echo "PostgreSQL service not ready, wait 10 more sec, attempts left: $i" - sleep 10 - ((i--)) - done - createuser -s postgres - bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-macos/setup_db.sh b/.github/actions/setup-postgres-macos/setup_db.sh deleted file mode 100755 index de59bf0fa..000000000 --- a/.github/actions/setup-postgres-macos/setup_db.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -set -x -env | grep '^PG' - -# If you want to run this script for your own postgresql (run with -# docker-compose) it will look like this: -# PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ -PGUSER="${PGUSER:-postgres}" -export PGUSER -PGPORT="${PGPORT:-5432}" -export PGPORT -PGHOST="${PGHOST:-localhost}" - -function connect_circle() { - # try to handle circleci/docker oddness - let rc=1 - while [[ $rc -eq 1 ]]; do - nc -z ${PGHOST} ${PGPORT} - let rc=$? - done - if [[ $rc -ne 0 ]]; then - echo "Fatal: Could not connect to $PGHOST" - exit 1 - fi -} - -# appveyor doesn't have 'nc', but it also doesn't have these issues -if [[ -n $CIRCLECI ]]; then - connect_circle -fi - -for i in {1..10}; do - if pg_isready -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ; then - break - fi - - echo "Waiting for postgres to be ready..." - sleep 2; -done; - -createdb dbt -psql -c "CREATE ROLE root WITH PASSWORD 'password';" -psql -c "ALTER ROLE root WITH LOGIN;" -psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" - -psql -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;" -psql -c "ALTER ROLE noaccess WITH LOGIN;" -psql -c "GRANT CONNECT ON DATABASE dbt TO noaccess;" -psql -c "CREATE ROLE dbt_test_user_1;" -psql -c "CREATE ROLE dbt_test_user_2;" -psql -c "CREATE ROLE dbt_test_user_3;" - -psql -c 'CREATE DATABASE "dbtMixedCase";' -psql -c 'GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION;' - -set +x diff --git a/.github/actions/setup-postgres-windows/action.yml b/.github/actions/setup-postgres-windows/action.yml deleted file mode 100644 index 419b5e267..000000000 --- a/.github/actions/setup-postgres-windows/action.yml +++ /dev/null @@ -1,12 +0,0 @@ -name: "Set up postgres (windows)" -description: "Set up postgres service on windows vm for dbt integration tests" -runs: - using: "composite" - steps: - - shell: pwsh - run: | - $pgService = Get-Service -Name postgresql* - Set-Service -InputObject $pgService -Status running -StartupType automatic - Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru - $env:Path += ";$env:PGBIN" - bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-windows/setup_db.sh b/.github/actions/setup-postgres-windows/setup_db.sh deleted file mode 100755 index de59bf0fa..000000000 --- a/.github/actions/setup-postgres-windows/setup_db.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/bin/bash -set -x -env | grep '^PG' - -# If you want to run this script for your own postgresql (run with -# docker-compose) it will look like this: -# PGHOST=127.0.0.1 PGUSER=root PGPASSWORD=password PGDATABASE=postgres \ -PGUSER="${PGUSER:-postgres}" -export PGUSER -PGPORT="${PGPORT:-5432}" -export PGPORT -PGHOST="${PGHOST:-localhost}" - -function connect_circle() { - # try to handle circleci/docker oddness - let rc=1 - while [[ $rc -eq 1 ]]; do - nc -z ${PGHOST} ${PGPORT} - let rc=$? - done - if [[ $rc -ne 0 ]]; then - echo "Fatal: Could not connect to $PGHOST" - exit 1 - fi -} - -# appveyor doesn't have 'nc', but it also doesn't have these issues -if [[ -n $CIRCLECI ]]; then - connect_circle -fi - -for i in {1..10}; do - if pg_isready -h "${PGHOST}" -p "${PGPORT}" -U "${PGUSER}" ; then - break - fi - - echo "Waiting for postgres to be ready..." - sleep 2; -done; - -createdb dbt -psql -c "CREATE ROLE root WITH PASSWORD 'password';" -psql -c "ALTER ROLE root WITH LOGIN;" -psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" - -psql -c "CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER;" -psql -c "ALTER ROLE noaccess WITH LOGIN;" -psql -c "GRANT CONNECT ON DATABASE dbt TO noaccess;" -psql -c "CREATE ROLE dbt_test_user_1;" -psql -c "CREATE ROLE dbt_test_user_2;" -psql -c "CREATE ROLE dbt_test_user_3;" - -psql -c 'CREATE DATABASE "dbtMixedCase";' -psql -c 'GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION;' - -set +x diff --git a/.github/workflows/lint.yml b/.github/workflows/code-quality.yml similarity index 80% rename from .github/workflows/lint.yml rename to .github/workflows/code-quality.yml index 2b336a430..5079161f2 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/code-quality.yml @@ -1,4 +1,4 @@ -name: Lint +name: Code Quality on: push: @@ -16,7 +16,7 @@ concurrency: cancel-in-progress: true jobs: - lint: + code-quality: name: Python 3.8 runs-on: ubuntu-latest @@ -26,15 +26,11 @@ jobs: with: persist-credentials: false - - name: Setup environment - uses: ./.github/actions/setup-environment - with: - python-version: "3.8" + - name: Setup `hatch` + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main - name: Run linters run: hatch run lint:all - shell: bash - name: Run typechecks run: hatch run typecheck:all - shell: bash diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index ec1f6e5f8..af04542d8 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -16,7 +16,7 @@ concurrency: cancel-in-progress: true jobs: - unit: + integration: name: Python ${{ matrix.python-version }} runs-on: ubuntu-latest @@ -25,35 +25,50 @@ jobs: matrix: python-version: ["3.8", "3.9", "3.10", "3.11"] + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + steps: - name: Check out repository uses: actions/checkout@v4 - with: - persist-credentials: false - - name: Setup environment - uses: ./.github/actions/setup-environment + - name: Setup postgres + shell: bash + run: psql -f ./scripts/setup_test_database.sql + env: + PGHOST: localhost + PGPORT: 5432 + PGUSER: postgres + PGPASSWORD: postgres + PGDATABASE: postgres + + - name: Setup `hatch` + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main with: python-version: ${{ matrix.python-version }} - - name: Set up postgres (linux) - if: runner.os == 'Linux' - uses: ./.github/actions/setup-postgres-linux - - - name: Set up postgres (macos) - if: runner.os == 'macOS' - uses: ./.github/actions/setup-postgres-macos - - - name: Set up postgres (windows) - if: runner.os == 'Windows' - uses: ./.github/actions/setup-postgres-windows - - name: Run integration tests run: hatch run integration-tests:all - shell: bash + env: + POSTGRES_TEST_HOST: localhost + POSTGRES_TEST_PORT: 5432 + POSTGRES_TEST_USER: root + POSTGRES_TEST_PASS: password + POSTGRES_TEST_DATABASE: dbt + POSTGRES_TEST_THREADS: 4 - name: Publish results - uses: ./.github/actions/publish-results + uses: dbt-labs/dbt-adapters/.github/actions/publish-results@main if: always() with: source-file: "results.csv" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9b92ee915..ccf805f5b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,23 +13,18 @@ on: permissions: read-all -defaults: - run: - shell: bash - # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise concurrency: - group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} + group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}-${{ inputs.deploy-to }} cancel-in-progress: true jobs: - release: name: PyPI - ${{ inputs.deploy-to }} runs-on: ubuntu-latest environment: name: ${{ inputs.deploy-to }} - url: ${{ vars.PYPI_URL }} + url: ${{ vars.PYPI_PROJECT_URL }} permissions: id-token: write # IMPORTANT: this permission is mandatory for trusted publishing @@ -39,7 +34,13 @@ jobs: with: persist-credentials: false + - name: Setup `hatch` + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + + - name: Build `dbt-postgres` + uses: dbt-labs/dbt-adapters/.github/actions/build-hatch@main + - name: Publish to PyPI - uses: ./.github/actions/publish-pypi + uses: dbt-labs/dbt-adapters/.github/actions/publish-pypi@main with: - python-version: "3.11" + pypi-repository-url: ${{ vars.PYPI_REPOSITORY_URL }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index f8a39ea8d..ac42e9acb 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -31,8 +31,8 @@ jobs: with: persist-credentials: false - - name: Setup environment - uses: ./.github/actions/setup-environment + - name: Setup `hatch` + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main with: python-version: ${{ matrix.python-version }} @@ -41,7 +41,7 @@ jobs: shell: bash - name: Publish results - uses: ./.github/actions/publish-results + uses: dbt-labs/dbt-adapters/.github/actions/publish-results@main if: always() with: source-file: "results.csv" diff --git a/pyproject.toml b/pyproject.toml index 9f73d36e8..c9ad0e50c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,7 @@ features = [ detached = true features = ["lint"] [tool.hatch.envs.lint.scripts] -all = ["- black", "- flake8", ] +all = ["black", "flake8"] black = "python -m black ." flake8 = "python -m flake8 ." diff --git a/scripts/setup_test_database.sql b/scripts/setup_test_database.sql new file mode 100644 index 000000000..8da2c0be8 --- /dev/null +++ b/scripts/setup_test_database.sql @@ -0,0 +1,16 @@ +CREATE DATABASE dbt; + +CREATE ROLE root WITH PASSWORD 'password'; +ALTER ROLE root WITH LOGIN; +GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION; + +CREATE ROLE noaccess WITH PASSWORD 'password' NOSUPERUSER; +ALTER ROLE noaccess WITH LOGIN; +GRANT CONNECT ON DATABASE dbt TO noaccess; + +CREATE ROLE dbt_test_user_1; +CREATE ROLE dbt_test_user_2; +CREATE ROLE dbt_test_user_3; + +CREATE DATABASE "dbtMixedCase"; +GRANT CREATE, CONNECT ON DATABASE "dbtMixedCase" TO root WITH GRANT OPTION; diff --git a/tests/functional/minimal_cli/test_minimal_cli.py b/tests/functional/minimal_cli/test_minimal_cli.py index 64bcb1f1e..1fccbbd07 100644 --- a/tests/functional/minimal_cli/test_minimal_cli.py +++ b/tests/functional/minimal_cli/test_minimal_cli.py @@ -30,16 +30,6 @@ def test_deps(self, runner, project): assert "1.0.0" in result.output -class TestLS(BaseConfigProject): - def test_ls(self, runner, project): - runner.invoke(cli, ["deps"]) - ls_result = runner.invoke(cli, ["ls"]) - assert "1 seed" in ls_result.output - assert "1 model" in ls_result.output - assert "5 data tests" in ls_result.output - assert "1 snapshot" in ls_result.output - - class TestBuild(BaseConfigProject): def test_build(self, runner, project): runner.invoke(cli, ["deps"]) diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py index 79771c50b..a268f0960 100644 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -892,24 +892,6 @@ def test_collision_test_names_get_hash( assert test_results[1].node.unique_id in expected_unique_ids -class TestGenericTestsCollide: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": dupe_generic_tests_collide__schema_yml, - "model_a.sql": dupe_generic_tests_collide__model_a, - } - - def test_generic_test_collision( - self, - project, - ): - """These tests collide, since only the configs differ""" - with pytest.raises(DuplicateResourceNameError) as exc: - run_dbt() - assert "dbt found two tests with the name" in str(exc.value) - - class TestGenericTestsConfigCustomMacros: @pytest.fixture(scope="class") def models(self): diff --git a/tests/functional/test_experimental_parser.py b/tests/functional/test_experimental_parser.py index 18ee85257..9064ee4f8 100644 --- a/tests/functional/test_experimental_parser.py +++ b/tests/functional/test_experimental_parser.py @@ -1,7 +1,7 @@ import os +from dbt.context.providers import RefArgs from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import RefArgs import pytest from tests.functional.utils import run_dbt, run_dbt_and_capture diff --git a/tests/functional/test_init.py b/tests/functional/test_init.py index 43f0fcc21..1c3133c84 100644 --- a/tests/functional/test_init.py +++ b/tests/functional/test_init.py @@ -420,55 +420,6 @@ def setup(self, project): os.remove(os.path.join(project.project_root, "dbt_project.yml")) -class TestInitInvalidProjectNameCLI(TestInitOutsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_invalid_project_name_cli( - self, mock_prompt, mock_confirm, mock_get_adapter, project_name, project - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - - invalid_name = "name-with-hyphen" - valid_name = project_name - manager.prompt.side_effect = [valid_name] - mock_get_adapter.return_value = [project.adapter.type()] - - run_dbt(["init", invalid_name, "--skip-profile-setup"]) - manager.assert_has_calls( - [ - call.prompt("Enter a name for your project (letters, digits, underscore)"), - ] - ) - - -class TestInitInvalidProjectNamePrompt(TestInitOutsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_invalid_project_name_prompt( - self, mock_prompt, mock_confirm, mock_get_adapter, project_name, project - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - - invalid_name = "name-with-hyphen" - valid_name = project_name - manager.prompt.side_effect = [invalid_name, valid_name] - mock_get_adapter.return_value = [project.adapter.type()] - - run_dbt(["init", "--skip-profile-setup"]) - manager.assert_has_calls( - [ - call.prompt("Enter a name for your project (letters, digits, underscore)"), - call.prompt("Enter a name for your project (letters, digits, underscore)"), - ] - ) - - class TestInitInsideProjectAndSkipProfileSetup(TestInitInsideOfProjectBase): @patch("dbt.task.init._get_adapter_plugin_names") @patch("click.confirm") From 317f1e943e401add8a8c92f9e063037c5d63b502 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 30 Jan 2024 13:40:46 -0500 Subject: [PATCH 026/114] Update job names, github uses job name, not workflow name for required status checks (#2) --- .github/workflows/code-quality.yml | 2 +- .github/workflows/integration-tests.yml | 2 +- .github/workflows/unit-tests.yml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 5079161f2..dedea94ba 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -17,7 +17,7 @@ concurrency: jobs: code-quality: - name: Python 3.8 + name: Code Quality runs-on: ubuntu-latest steps: diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index af04542d8..af2383bd0 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -17,7 +17,7 @@ concurrency: jobs: integration: - name: Python ${{ matrix.python-version }} + name: Integration Tests runs-on: ubuntu-latest strategy: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index ac42e9acb..69681c0fa 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -17,7 +17,7 @@ concurrency: jobs: unit: - name: Python ${{ matrix.python-version }} + name: Unit Tests runs-on: ubuntu-latest strategy: From 6bf58e8bc7d4a30d5a463c2d1ad2322fb7d54ef6 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Thu, 1 Feb 2024 09:45:31 -0800 Subject: [PATCH 027/114] swap back to __version__.py (#3) --- CONTRIBUTING.md | 1 + .../postgres/{__about__.py => __version__.py} | 0 pyproject.toml | 2 +- tests/functional/dbt_debug/test_dbt_debug.py | 55 ++++++ .../docs/test_duplicate_docs_block.py | 34 ---- tests/functional/docs/test_generate.py | 100 ---------- .../functional/docs/test_good_docs_blocks.py | 177 ------------------ tests/functional/docs/test_invalid_doc_ref.py | 46 ----- .../docs/test_missing_docs_blocks.py | 42 ----- .../docs/test_model_version_docs_blocks.py | 74 -------- tests/functional/docs/test_static.py | 50 ----- 11 files changed, 57 insertions(+), 524 deletions(-) rename dbt/adapters/postgres/{__about__.py => __version__.py} (100%) create mode 100644 tests/functional/dbt_debug/test_dbt_debug.py delete mode 100644 tests/functional/docs/test_duplicate_docs_block.py delete mode 100644 tests/functional/docs/test_generate.py delete mode 100644 tests/functional/docs/test_good_docs_blocks.py delete mode 100644 tests/functional/docs/test_invalid_doc_ref.py delete mode 100644 tests/functional/docs/test_missing_docs_blocks.py delete mode 100644 tests/functional/docs/test_model_version_docs_blocks.py delete mode 100644 tests/functional/docs/test_static.py diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9d2880e8a..f39e4cb5f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -86,6 +86,7 @@ will be reflected in the virtual environment immediately. `dbt-postgres` contains [unit](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/unit) and [functional](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/functional) tests. + ### Unit tests Unit tests can be run locally without setting up a database connection: diff --git a/dbt/adapters/postgres/__about__.py b/dbt/adapters/postgres/__version__.py similarity index 100% rename from dbt/adapters/postgres/__about__.py rename to dbt/adapters/postgres/__version__.py diff --git a/pyproject.toml b/pyproject.toml index c9ad0e50c..31c5ac799 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -84,7 +84,7 @@ include = ["dbt"] packages = ["dbt"] [tool.hatch.version] -path = "dbt/adapters/postgres/__about__.py" +path = "dbt/adapters/postgres/__version__.py" [tool.hatch.envs.default] features = [ diff --git a/tests/functional/dbt_debug/test_dbt_debug.py b/tests/functional/dbt_debug/test_dbt_debug.py new file mode 100644 index 000000000..3e2a182ee --- /dev/null +++ b/tests/functional/dbt_debug/test_dbt_debug.py @@ -0,0 +1,55 @@ +import pytest +import os +import re +import yaml + +from tests.functional.utils import run_dbt, run_dbt_and_capture + +MODELS__MODEL_SQL = """ +seled 1 as id +""" + + +class BaseDebug: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": MODELS__MODEL_SQL} + + @pytest.fixture(autouse=True) + def capsys(self, capsys): + self.capsys = capsys + + def assertGotValue(self, linepat, result): + found = False + output = self.capsys.readouterr().out + for line in output.split("\n"): + if linepat.match(line): + found = True + assert result in line + if not found: + with pytest.raises(Exception) as exc: + msg = f"linepat {linepat} not found in stdout: {output}" + assert msg in str(exc.value) + + def check_project(self, splitout, msg="ERROR invalid"): + for line in splitout: + if line.strip().startswith("dbt_project.yml file"): + assert msg in line + elif line.strip().startswith("profiles.yml file"): + assert "ERROR invalid" not in line + + +class BaseDebugProfileVariable(BaseDebug): + @pytest.fixture(scope="class") + def project_config_update(self): + return {"config-version": 2, "profile": '{{ "te" ~ "st" }}'} + + +class TestDebugPostgres(BaseDebug): + def test_ok(self, project): + result, log = run_dbt_and_capture(["debug"]) + assert "ERROR" not in log + + +class TestDebugProfileVariablePostgres(BaseDebugProfileVariable): + pass diff --git a/tests/functional/docs/test_duplicate_docs_block.py b/tests/functional/docs/test_duplicate_docs_block.py deleted file mode 100644 index 393980e27..000000000 --- a/tests/functional/docs/test_duplicate_docs_block.py +++ /dev/null @@ -1,34 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -duplicate_doc_blocks_model_sql = "select 1 as id, 'joe' as first_name" - -duplicate_doc_blocks_docs_md = """{% docs my_model_doc %} - a doc string -{% enddocs %} - -{% docs my_model_doc %} - duplicate doc string -{% enddocs %}""" - -duplicate_doc_blocks_schema_yml = """version: 2 - -models: - - name: model - description: "{{ doc('my_model_doc') }}" -""" - - -class TestDuplicateDocsBlock: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": duplicate_doc_blocks_model_sql, - "schema.yml": duplicate_doc_blocks_schema_yml, - } - - def test_duplicate_doc_ref(self, project): - with pytest.raises(CompilationError): - run_dbt(expect_pass=False) diff --git a/tests/functional/docs/test_generate.py b/tests/functional/docs/test_generate.py deleted file mode 100644 index 1da96f5ba..000000000 --- a/tests/functional/docs/test_generate.py +++ /dev/null @@ -1,100 +0,0 @@ -from dbt.tests.util import get_manifest, run_dbt -import pytest - - -sample_seed = """sample_num,sample_bool -1,true -2,false -3,true -""" - -second_seed = """sample_num,sample_bool -4,true -5,false -6,true -""" - -sample_config = """ -sources: - - name: my_seed - schema: "{{ target.schema }}" - tables: - - name: sample_seed - - name: second_seed - - name: fake_seed -""" - - -class TestBaseGenerate: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": "select 1 as fun", - "alt_model.sql": "select 1 as notfun", - "sample_config.yml": sample_config, - } - - @pytest.fixture(scope="class") - def seeds(self): - return { - "sample_seed.csv": sample_seed, - "second_seed.csv": sample_seed, - } - - -class TestGenerateManifestNotCompiled(TestBaseGenerate): - def test_manifest_not_compiled(self, project): - run_dbt(["docs", "generate", "--no-compile"]) - # manifest.json is written out in parsing now, but it - # shouldn't be compiled because of the --no-compile flag - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - assert model_id in manifest.nodes - assert manifest.nodes[model_id].compiled is False - - -class TestGenerateEmptyCatalog(TestBaseGenerate): - def test_generate_empty_catalog(self, project): - catalog = run_dbt(["docs", "generate", "--empty-catalog"]) - assert catalog.nodes == {}, "nodes should be empty" - assert catalog.sources == {}, "sources should be empty" - assert catalog.errors is None, "errors should be null" - - -class TestGenerateSelectLimitsCatalog(TestBaseGenerate): - def test_select_limits_catalog(self, project): - run_dbt(["run"]) - catalog = run_dbt(["docs", "generate", "--select", "my_model"]) - assert len(catalog.nodes) == 1 - assert "model.test.my_model" in catalog.nodes - - -class TestGenerateSelectLimitsNoMatch(TestBaseGenerate): - def test_select_limits_no_match(self, project): - run_dbt(["run"]) - catalog = run_dbt(["docs", "generate", "--select", "my_missing_model"]) - assert len(catalog.nodes) == 0 - - -class TestGenerateCatalogWithSources(TestBaseGenerate): - def test_catalog_with_sources(self, project): - run_dbt(["build"]) - catalog = run_dbt(["docs", "generate"]) - - # 2 seeds + 2 models - assert len(catalog.nodes) == 4 - # 2 sources (only ones that exist) - assert len(catalog.sources) == 2 - - -class TestGenerateSelectSource(TestBaseGenerate): - def test_select_source(self, project): - run_dbt(["build"]) - catalog = run_dbt(["docs", "generate", "--select", "source:test.my_seed.sample_seed"]) - - # 2 seeds - # TODO: Filtering doesn't work for seeds - assert len(catalog.nodes) == 2 - # 2 sources - # TODO: Filtering doesn't work for sources - assert len(catalog.sources) == 2 diff --git a/tests/functional/docs/test_good_docs_blocks.py b/tests/functional/docs/test_good_docs_blocks.py deleted file mode 100644 index d1ab0f5a1..000000000 --- a/tests/functional/docs/test_good_docs_blocks.py +++ /dev/null @@ -1,177 +0,0 @@ -import json -import os -from pathlib import Path - -from dbt.tests.util import run_dbt, update_config_file, write_file -import pytest - - -good_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" - -good_docs_blocks_docs_md = """{% docs my_model_doc %} -My model is just a copy of the seed -{% enddocs %} - -{% docs my_model_doc__id %} -The user ID number -{% enddocs %} - -The following doc is never used, which should be fine. -{% docs my_model_doc__first_name %} -The user's first name (should not be shown!) -{% enddocs %} - -This doc is referenced by its full name -{% docs my_model_doc__last_name %} -The user's last name -{% enddocs %} -""" - -good_doc_blocks_alt_docs_md = """{% docs my_model_doc %} -Alt text about the model -{% enddocs %} - -{% docs my_model_doc__id %} -The user ID number with alternative text -{% enddocs %} - -The following doc is never used, which should be fine. -{% docs my_model_doc__first_name %} -The user's first name - don't show this text! -{% enddocs %} - -This doc is referenced by its full name -{% docs my_model_doc__last_name %} -The user's last name in this other file -{% enddocs %} -""" - -good_docs_blocks_schema_yml = """version: 2 - -models: - - name: model - description: "{{ doc('my_model_doc') }}" - columns: - - name: id - description: "{{ doc('my_model_doc__id') }}" - - name: first_name - description: The user's first name - - name: last_name - description: "{{ doc('test', 'my_model_doc__last_name') }}" -""" - - -class TestGoodDocsBlocks: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": good_docs_blocks_model_sql, - "schema.yml": good_docs_blocks_schema_yml, - "docs.md": good_docs_blocks_docs_md, - } - - def test_valid_doc_ref(self, project): - result = run_dbt() - assert len(result.results) == 1 - - assert os.path.exists("./target/manifest.json") - - with open("./target/manifest.json") as fp: - manifest = json.load(fp) - - model_data = manifest["nodes"]["model.test.model"] - - assert model_data["description"] == "My model is just a copy of the seed" - - assert { - "name": "id", - "description": "The user ID number", - "data_type": None, - "constraints": [], - "meta": {}, - "quote": None, - "tags": [], - } == model_data["columns"]["id"] - - assert { - "name": "first_name", - "description": "The user's first name", - "data_type": None, - "constraints": [], - "meta": {}, - "quote": None, - "tags": [], - } == model_data["columns"]["first_name"] - - assert { - "name": "last_name", - "description": "The user's last name", - "data_type": None, - "constraints": [], - "meta": {}, - "quote": None, - "tags": [], - } == model_data["columns"]["last_name"] - - assert len(model_data["columns"]) == 3 - - -class TestGoodDocsBlocksAltPath: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": good_docs_blocks_model_sql, "schema.yml": good_docs_blocks_schema_yml} - - def test_alternative_docs_path(self, project): - # self.use_default_project({"docs-paths": [self.dir("docs")]}) - docs_path = Path(project.project_root, "alt-docs") - docs_path.mkdir() - write_file(good_doc_blocks_alt_docs_md, project.project_root, "alt-docs", "docs.md") - - update_config_file( - {"docs-paths": [str(docs_path)]}, project.project_root, "dbt_project.yml" - ) - - result = run_dbt() - - assert len(result.results) == 1 - - assert os.path.exists("./target/manifest.json") - - with open("./target/manifest.json") as fp: - manifest = json.load(fp) - - model_data = manifest["nodes"]["model.test.model"] - - assert model_data["description"] == "Alt text about the model" - - assert { - "name": "id", - "description": "The user ID number with alternative text", - "data_type": None, - "constraints": [], - "meta": {}, - "quote": None, - "tags": [], - } == model_data["columns"]["id"] - - assert { - "name": "first_name", - "description": "The user's first name", - "data_type": None, - "constraints": [], - "meta": {}, - "quote": None, - "tags": [], - } == model_data["columns"]["first_name"] - - assert { - "name": "last_name", - "description": "The user's last name in this other file", - "data_type": None, - "constraints": [], - "meta": {}, - "quote": None, - "tags": [], - } == model_data["columns"]["last_name"] - - assert len(model_data["columns"]) == 3 diff --git a/tests/functional/docs/test_invalid_doc_ref.py b/tests/functional/docs/test_invalid_doc_ref.py deleted file mode 100644 index eda72d64f..000000000 --- a/tests/functional/docs/test_invalid_doc_ref.py +++ /dev/null @@ -1,46 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -invalid_doc_ref_model_sql = "select 1 as id, 'joe' as first_name" - -invalid_doc_ref_docs_md = """{% docs my_model_doc %} -My model is just a copy of the seed -{% enddocs %} - -{% docs my_model_doc__id %} -The user ID number -{% enddocs %} - -The following doc is never used, which should be fine. -{% docs my_model_doc__first_name %} -The user's first name -{% enddocs %}""" - -invalid_doc_ref_schema_yml = """version: 2 - -models: - - name: model - description: "{{ doc('my_model_doc') }}" - columns: - - name: id - description: "{{ doc('my_model_doc__id') }}" - - name: first_name - description: "{{ doc('foo.bar.my_model_doc__id') }}" -""" - - -class TestInvalidDocRef: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": invalid_doc_ref_model_sql, - "docs.md": invalid_doc_ref_docs_md, - "schema.yml": invalid_doc_ref_schema_yml, - } - - def test_invalid_doc_ref(self, project): - # The run should fail since we could not find the docs reference. - with pytest.raises(CompilationError): - run_dbt(expect_pass=False) diff --git a/tests/functional/docs/test_missing_docs_blocks.py b/tests/functional/docs/test_missing_docs_blocks.py deleted file mode 100644 index 1aa231681..000000000 --- a/tests/functional/docs/test_missing_docs_blocks.py +++ /dev/null @@ -1,42 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -missing_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" - -missing_docs_blocks_docs_md = """{% docs my_model_doc %} -My model is just a copy of the seed -{% enddocs %} - -{% docs my_model_doc__id %} -The user ID number -{% enddocs %}""" - -missing_docs_blocks_schema_yml = """version: 2 - -models: - - name: model - description: "{{ doc('my_model_doc') }}" - columns: - - name: id - description: "{{ doc('my_model_doc__id') }}" - - name: first_name - # invalid reference - description: "{{ doc('my_model_doc__first_name') }}" -""" - - -class TestMissingDocsBlocks: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": missing_docs_blocks_model_sql, - "schema.yml": missing_docs_blocks_schema_yml, - "docs.md": missing_docs_blocks_docs_md, - } - - def test_missing_doc_ref(self, project): - # The run should fail since we could not find the docs reference. - with pytest.raises(CompilationError): - run_dbt() diff --git a/tests/functional/docs/test_model_version_docs_blocks.py b/tests/functional/docs/test_model_version_docs_blocks.py deleted file mode 100644 index be6100810..000000000 --- a/tests/functional/docs/test_model_version_docs_blocks.py +++ /dev/null @@ -1,74 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -model_1 = """ -select 1 as id, 'joe' as first_name -""" - -model_versioned = """ -select 1 as id, 'joe' as first_name -""" - -docs_md = """ -{% docs model_description %} -unversioned model -{% enddocs %} - -{% docs column_id_doc %} -column id for some thing -{% enddocs %} - -{% docs versioned_model_description %} -versioned model -{% enddocs %} - -""" - -schema_yml = """ -models: - - name: model_1 - description: '{{ doc("model_description") }}' - columns: - - name: id - description: '{{ doc("column_id_doc") }}' - - - name: model_versioned - description: '{{ doc("versioned_model_description") }}' - latest_version: 1 - versions: - - v: 1 - config: - alias: my_alias - columns: - - name: id - description: '{{ doc("column_id_doc") }}' - - name: first_name - description: 'plain text' - - v: 2 - columns: - - name: other_id -""" - - -class TestVersionedModelDocsBlock: - @pytest.fixture(scope="class") - def models(self): - return { - "model_1.sql": model_1, - "model_versioned.sql": model_versioned, - "schema.yml": schema_yml, - "docs.md": docs_md, - } - - def test_versioned_doc_ref(self, project): - manifest = run_dbt(["parse"]) - model_1 = manifest.nodes["model.test.model_1"] - model_v1 = manifest.nodes["model.test.model_versioned.v1"] - - assert model_1.description == "unversioned model" - assert model_v1.description == "versioned model" - - assert model_1.columns["id"].description == "column id for some thing" - assert model_v1.columns["id"].description == "column id for some thing" - assert model_v1.columns["first_name"].description == "plain text" diff --git a/tests/functional/docs/test_static.py b/tests/functional/docs/test_static.py deleted file mode 100644 index 589668bd9..000000000 --- a/tests/functional/docs/test_static.py +++ /dev/null @@ -1,50 +0,0 @@ -import os - -from dbt.task.docs import DOCS_INDEX_FILE_PATH -from dbt.tests.util import run_dbt -from dbt_common.clients.system import load_file_contents -import pytest - - -class TestStaticGenerate: - @pytest.fixture(scope="class") - def models(self): - return {"my_model.sql": "select 1 as fun"} - - def test_static_generated(self, project): - run_dbt(["docs", "generate", "--static"]) - - source_index_html = load_file_contents(DOCS_INDEX_FILE_PATH) - - target_index_html = load_file_contents( - os.path.join(project.project_root, "target", "index.html") - ) - - # Validate index.html was copied correctly - assert len(target_index_html) == len(source_index_html) - assert hash(target_index_html) == hash(source_index_html) - - manifest_data = load_file_contents( - os.path.join(project.project_root, "target", "manifest.json") - ) - - catalog_data = load_file_contents( - os.path.join(project.project_root, "target", "catalog.json") - ) - - static_index_html = load_file_contents( - os.path.join(project.project_root, "target", "static_index.html") - ) - - # Calculate expected static_index.html - expected_static_index_html = source_index_html - expected_static_index_html = expected_static_index_html.replace( - '"MANIFEST.JSON INLINE DATA"', manifest_data - ) - expected_static_index_html = expected_static_index_html.replace( - '"CATALOG.JSON INLINE DATA"', catalog_data - ) - - # Validate static_index.html was generated correctly - assert len(expected_static_index_html) == len(static_index_html) - assert hash(expected_static_index_html) == hash(static_index_html) From deb4e09375a182a70a525bf7a38e784f8f3abd39 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 6 Feb 2024 09:36:13 -0800 Subject: [PATCH 028/114] Temporarily default to psycopg2 (#5) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 31c5ac799..3240b861c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ classifiers = [ ] dependencies = [ "dbt-adapters", - "psycopg2-binary~=2.9", + "psycopg2~=2.9", # installed via dbt-adapters but used directly, unpin minor to avoid version conflicts "dbt-common<1.0", "agate<2.0", From 3b8647cc320dd8bae6f377757ebfe3fff7888290 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Mon, 12 Feb 2024 14:49:09 -0500 Subject: [PATCH 029/114] Remove experimental parser tests (#10) --- tests/functional/test_experimental_parser.py | 304 ------------------- tests/unit/test_adapter.py | 46 --- 2 files changed, 350 deletions(-) delete mode 100644 tests/functional/test_experimental_parser.py diff --git a/tests/functional/test_experimental_parser.py b/tests/functional/test_experimental_parser.py deleted file mode 100644 index 9064ee4f8..000000000 --- a/tests/functional/test_experimental_parser.py +++ /dev/null @@ -1,304 +0,0 @@ -import os - -from dbt.context.providers import RefArgs -from dbt.contracts.graph.manifest import Manifest -import pytest - -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -def get_manifest(): - path = "./target/partial_parse.msgpack" - if os.path.exists(path): - with open(path, "rb") as fp: - manifest_mp = fp.read() - manifest: Manifest = Manifest.from_msgpack(manifest_mp) - return manifest - else: - return None - - -basic__schema_yml = """ -version: 2 - -sources: - - name: my_src - schema: "{{ target.schema }}" - tables: - - name: my_tbl - -models: - - name: model_a - columns: - - name: fun - -""" - -basic__model_a_sql = """ -{{ config(tags='hello', x=False) }} -{{ config(tags='world', x=True) }} - -select * from {{ ref('model_b') }} -cross join {{ source('my_src', 'my_tbl') }} -where false as boop - -""" - -basic__model_b_sql = """ -select 1 as fun -""" - - -ref_macro__schema_yml = """ -version: 2 - -""" - -ref_macro__models__model_a_sql = """ -select 1 as id - -""" - -source_macro__macros__source_sql = """ -{% macro source(source_name, table_name) %} - -{% endmacro %} -""" - -source_macro__schema_yml = """ -version: 2 - -""" - -source_macro__models__model_a_sql = """ -select 1 as id - -""" - -config_macro__macros__config_sql = """ -{% macro config() %} - -{% endmacro %} -""" - -config_macro__schema_yml = """ -version: 2 - -""" - -config_macro__models__model_a_sql = """ -select 1 as id - -""" - - -class BasicExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": basic__model_a_sql, - "model_b.sql": basic__model_b_sql, - "schema.yml": basic__schema_yml, - } - - -class TestBasicExperimentalParserFlag(BasicExperimentalParser): - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - os.environ["DBT_USE_EXPERIMENTAL_PARSER"] = "true" - yield - del os.environ["DBT_USE_EXPERIMENTAL_PARSER"] - - def test_env_use_experimental_parser(self, project): - _, log_output = run_dbt_and_capture(["--debug", "parse"]) - - # successful stable static parsing - assert not ("1699: " in log_output) - # successful experimental static parsing - assert "1698: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # jinja rendering - assert not ("1602: " in log_output) - - -class TestBasicStaticParserFlag(BasicExperimentalParser): - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - os.environ["DBT_STATIC_PARSER"] = "false" - yield - del os.environ["DBT_STATIC_PARSER"] - - def test_env_static_parser(self, project): - _, log_output = run_dbt_and_capture(["--debug", "parse"]) - - print(log_output) - - # jinja rendering because of --no-static-parser - assert "1605: " in log_output - # successful stable static parsing - assert not ("1699: " in log_output) - # successful experimental static parsing - assert not ("1698: " in log_output) - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # fallback jinja rendering - assert not ("1602: " in log_output) - - -class TestBasicExperimentalParser(BasicExperimentalParser): - # test that the experimental parser extracts some basic ref, source, and config calls. - def test_experimental_parser_basic( - self, - project, - ): - run_dbt(["--use-experimental-parser", "parse"]) - manifest = get_manifest() - node = manifest.nodes["model.test.model_a"] - assert node.refs == [RefArgs(name="model_b")] - assert node.sources == [["my_src", "my_tbl"]] - assert node.config._extra == {"x": True} - assert node.config.tags == ["hello", "world"] - - -class TestBasicStaticParser(BasicExperimentalParser): - # test that the static parser extracts some basic ref, source, and config calls by default - # without the experimental flag and without rendering jinja - def test_static_parser_basic(self, project): - _, log_output = run_dbt_and_capture(["--debug", "parse"]) - - # successful stable static parsing - assert "1699: " in log_output - # successful experimental static parsing - assert not ("1698: " in log_output) - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # jinja rendering - assert not ("1602: " in log_output) - - manifest = get_manifest() - node = manifest.nodes["model.test.model_a"] - assert node.refs == [RefArgs(name="model_b")] - assert node.sources == [["my_src", "my_tbl"]] - assert node.config._extra == {"x": True} - assert node.config.tags == ["hello", "world"] - - -class TestBasicNoStaticParser(BasicExperimentalParser): - # test that the static parser doesn't run when the flag is set - def test_static_parser_is_disabled(self, project): - _, log_output = run_dbt_and_capture(["--debug", "--no-static-parser", "parse"]) - - # jinja rendering because of --no-static-parser - assert "1605: " in log_output - # successful stable static parsing - assert not ("1699: " in log_output) - # successful experimental static parsing - assert not ("1698: " in log_output) - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # fallback jinja rendering - assert not ("1602: " in log_output) - - -class TestRefOverrideExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": ref_macro__models__model_a_sql, - "schema.yml": ref_macro__schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "source.sql": source_macro__macros__source_sql, - } - - # test that the experimental parser doesn't run if the ref built-in is overriden with a macro - def test_experimental_parser_ref_override( - self, - project, - ): - _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) - - print(log_output) - - # successful experimental static parsing - assert not ("1698: " in log_output) - # fallback to jinja rendering - assert "1602: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # didn't run static parser because dbt detected a built-in macro override - assert "1601: " in log_output - - -class TestSourceOverrideExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": source_macro__models__model_a_sql, - "schema.yml": source_macro__schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "source.sql": source_macro__macros__source_sql, - } - - # test that the experimental parser doesn't run if the source built-in is overriden with a macro - def test_experimental_parser_source_override( - self, - project, - ): - _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) - - # successful experimental static parsing - assert not ("1698: " in log_output) - # fallback to jinja rendering - assert "1602: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # didn't run static parser because dbt detected a built-in macro override - assert "1601: " in log_output - - -class TestConfigOverrideExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": config_macro__models__model_a_sql, - "schema.yml": config_macro__schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "config.sql": config_macro__macros__config_sql, - } - - # test that the experimental parser doesn't run if the config built-in is overriden with a macro - def test_experimental_parser_config_override( - self, - project, - ): - _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) - - # successful experimental static parsing - assert not ("1698: " in log_output) - # fallback to jinja rendering - assert "1602: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # didn't run static parser because dbt detected a built-in macro override - assert "1601: " in log_output diff --git a/tests/unit/test_adapter.py b/tests/unit/test_adapter.py index be2eef140..d73ed54c3 100644 --- a/tests/unit/test_adapter.py +++ b/tests/unit/test_adapter.py @@ -1,10 +1,6 @@ -import dataclasses from multiprocessing import get_context from unittest import TestCase, mock -import agate -from dbt.adapters.base import BaseRelation -from dbt.adapters.contracts.relation import Path from dbt_common.exceptions import DbtValidationError from dbt.adapters.postgres import Plugin as PostgresPlugin, PostgresAdapter @@ -306,45 +302,3 @@ def test_set_zero_keepalive(self, psycopg2): connect_timeout=10, application_name="dbt", ) - - @mock.patch.object(PostgresAdapter, "execute_macro") - @mock.patch.object(PostgresAdapter, "_get_catalog_relations") - def test_get_catalog_various_schemas(self, mock_get_relations, mock_execute): - self.catalog_test(mock_get_relations, mock_execute, False) - - @mock.patch.object(PostgresAdapter, "execute_macro") - @mock.patch.object(PostgresAdapter, "_get_catalog_relations") - def test_get_filtered_catalog(self, mock_get_relations, mock_execute): - self.catalog_test(mock_get_relations, mock_execute, True) - - def catalog_test(self, mock_get_relations, mock_execute, filtered=False): - column_names = ["table_database", "table_schema", "table_name"] - relations = [ - BaseRelation(path=Path(database="dbt", schema="foo", identifier="bar")), - BaseRelation(path=Path(database="dbt", schema="FOO", identifier="baz")), - BaseRelation(path=Path(database="dbt", schema=None, identifier="bar")), - BaseRelation(path=Path(database="dbt", schema="quux", identifier="bar")), - BaseRelation(path=Path(database="dbt", schema="skip", identifier="bar")), - ] - rows = list(map(lambda x: dataclasses.astuple(x.path), relations)) - mock_execute.return_value = agate.Table(rows=rows, column_names=column_names) - - mock_get_relations.return_value = relations - - relation_configs = [] - used_schemas = {("dbt", "foo"), ("dbt", "quux")} - - if filtered: - catalog, exceptions = self.adapter.get_filtered_catalog( - relation_configs, used_schemas, set([relations[0], relations[3]]) - ) - else: - catalog, exceptions = self.adapter.get_catalog(relation_configs, used_schemas) - - tupled_catalog = set(map(tuple, catalog)) - if filtered: - self.assertEqual(tupled_catalog, {rows[0], rows[3]}) - else: - self.assertEqual(tupled_catalog, {rows[0], rows[1], rows[3]}) - - self.assertEqual(exceptions, []) From cb1bdd578a556e39eea5f7b847c3aaeec70629c5 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Mon, 12 Feb 2024 14:01:57 -0700 Subject: [PATCH 030/114] GitHub Action to add/remove triage labels as-needed (#7) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .github/workflows/triage-labels.yml | 31 +++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 .github/workflows/triage-labels.yml diff --git a/.github/workflows/triage-labels.yml b/.github/workflows/triage-labels.yml new file mode 100644 index 000000000..c693eb485 --- /dev/null +++ b/.github/workflows/triage-labels.yml @@ -0,0 +1,31 @@ +# **what?** +# When the maintenance team triages, we sometimes need more information from the issue creator. In +# those cases we remove the `triage` label and add the `awaiting_response` label. Once we +# recieve a response in the form of a comment, we want the `awaiting_response` label removed +# in favor of the `triage` label so we are aware that the issue needs action. + +# **why?** +# To help with out team triage issue tracking + +# **when?** +# This will run when a comment is added to an issue and that issue has the `awaiting_response` label. + +name: Update Triage Label + +on: issue_comment + +defaults: + run: + shell: bash + +permissions: + issues: write + +jobs: + triage_label: + if: contains(github.event.issue.labels.*.name, 'awaiting_response') + uses: dbt-labs/actions/.github/workflows/swap-labels.yml@main + with: + add_label: "triage" + remove_label: "awaiting_response" + secrets: inherit From 33560531bf42ef71c23792296077d763d929d0f3 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Mon, 12 Feb 2024 14:33:22 -0700 Subject: [PATCH 031/114] GitHub Action to close issues as stale as-needed (#8) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .github/workflows/stale.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 .github/workflows/stale.yml diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 000000000..75a14dd46 --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,30 @@ +# **what?** +# For issues that have been open for awhile without activity, label +# them as stale with a warning that they will be closed out. If +# anyone comments to keep the issue open, it will automatically +# remove the stale label and keep it open. + +# Stale label rules: +# awaiting_response, more_information_needed -> 90 days +# good_first_issue, help_wanted -> 360 days (a year) +# tech_debt -> 720 (2 years) +# all else defaults -> 180 days (6 months) + +# **why?** +# To keep the repo in a clean state from issues that aren't relevant anymore + +# **when?** +# Once a day + +name: "Close stale issues and PRs" +on: + schedule: + - cron: "30 1 * * *" + +permissions: + issues: write + pull-requests: write + +jobs: + stale: + uses: dbt-labs/actions/.github/workflows/stale-bot-matrix.yml@main From 5fa52caa48c1940d5512e5db10dc71fc51e7e749 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 15 Feb 2024 12:43:43 -0500 Subject: [PATCH 032/114] Point testing dependencies to `main` (#12) --- pyproject.toml | 99 +++++++++++++++++--------------------- tests/unit/test_adapter.py | 48 ++++++++++++++++++ 2 files changed, 91 insertions(+), 56 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3240b861c..71ef119c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,44 +23,12 @@ classifiers = [ "Programming Language :: Python :: 3.11", ] dependencies = [ - "dbt-adapters", - "psycopg2~=2.9", - # installed via dbt-adapters but used directly, unpin minor to avoid version conflicts + "dbt-adapters>=0.1.0a6,<0.2.0", + "psycopg2>=2.9,<3.0", + # installed via dbt-adapters but used directly "dbt-common<1.0", - "agate<2.0", + "agate>=1.0,<2.0", ] -[project.optional-dependencies] -dev = [ - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", -] -lint = [ - "black", - "flake8", - "Flake8-pyproject", -] -typecheck = [ - "mypy", - "types-protobuf", - "types-pytz", -] -test = [ - # TODO: remove `dbt-core` dependencies from unit tests - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", - "freezegun", - "pytest", - "pytest-dotenv", - "pytest-mock", - "pytest-xdist", -] -integration = [ - "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", -] -build = [ - "wheel", - "twine", - "check-wheel-contents", -] - [project.urls] Homepage = "https://github.com/dbt-labs/dbt-postgres" Documentation = "https://docs.getdbt.com" @@ -72,11 +40,6 @@ Changelog = "https://github.com/dbt-labs/dbt-postgres/blob/main/CHANGELOG.md" requires = ["hatchling"] build-backend = "hatchling.build" -# TODO: this is needed to install from github in optoinal-dependencies -# alternatively, we can stick the github dependencies directly in the hatch envs -[tool.hatch.metadata] -allow-direct-references = true - [tool.hatch.build.targets.sdist] include = ["dbt"] @@ -87,44 +50,68 @@ packages = ["dbt"] path = "dbt/adapters/postgres/__version__.py" [tool.hatch.envs.default] -features = [ - "lint", - "typecheck", - "test", - "integration", - "build", +dependencies = [ + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", ] [tool.hatch.envs.lint] detached = true -features = ["lint"] +dependencies = [ + "black", + "flake8", + "Flake8-pyproject", +] [tool.hatch.envs.lint.scripts] -all = ["black", "flake8"] +all = [ + "black", + "flake8", +] black = "python -m black ." flake8 = "python -m flake8 ." [tool.hatch.envs.typecheck] -features = ["typecheck"] +dependencies = [ + "mypy", + "types-protobuf", + "types-pytz", +] [tool.hatch.envs.typecheck.scripts] all = "python -m mypy ." [tool.hatch.envs.unit-tests] -# TODO: confirm this works for production testing or add appropriate hatch envs -features = ["dev", "test"] +dependencies = [ + # TODO: remove `dbt-core` dependencies from unit tests + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", + "freezegun", + "pytest", + "pytest-dotenv", + "pytest-mock", + "pytest-xdist", +] [tool.hatch.envs.unit-tests.scripts] all = "python -m pytest {args:tests/unit}" [tool.hatch.envs.integration-tests] -# TODO: confirm this works for production testing or add appropriate hatch envs -features = ["dev", "test", "integration"] +template = "unit-tests" +extra-dependencies = [ + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", +] [tool.hatch.envs.integration-tests.scripts] all = "python -m pytest {args:tests/functional}" [tool.hatch.envs.build] detached = true -features = ["build"] +dependencies = [ + "wheel", + "twine", + "check-wheel-contents", +] [tool.hatch.envs.build.scripts] -check-all = ["- check-wheel", "- check-sdist"] +check-all = [ + "- check-wheel", + "- check-sdist", +] check-wheel = [ "twine check dist/*", "find ./dist/dbt_postgres-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", diff --git a/tests/unit/test_adapter.py b/tests/unit/test_adapter.py index d73ed54c3..1c375ac03 100644 --- a/tests/unit/test_adapter.py +++ b/tests/unit/test_adapter.py @@ -1,6 +1,11 @@ +import dataclasses from multiprocessing import get_context from unittest import TestCase, mock +import agate +from dbt.adapters.base import BaseRelation +from dbt.adapters.contracts.relation import Path +from dbt_common.context import set_invocation_context from dbt_common.exceptions import DbtValidationError from dbt.adapters.postgres import Plugin as PostgresPlugin, PostgresAdapter @@ -302,3 +307,46 @@ def test_set_zero_keepalive(self, psycopg2): connect_timeout=10, application_name="dbt", ) + + @mock.patch.object(PostgresAdapter, "execute_macro") + @mock.patch.object(PostgresAdapter, "_get_catalog_relations") + def test_get_catalog_various_schemas(self, mock_get_relations, mock_execute): + self.catalog_test(mock_get_relations, mock_execute, False) + + @mock.patch.object(PostgresAdapter, "execute_macro") + @mock.patch.object(PostgresAdapter, "_get_catalog_relations") + def test_get_filtered_catalog(self, mock_get_relations, mock_execute): + self.catalog_test(mock_get_relations, mock_execute, True) + + def catalog_test(self, mock_get_relations, mock_execute, filtered=False): + column_names = ["table_database", "table_schema", "table_name"] + relations = [ + BaseRelation(path=Path(database="dbt", schema="foo", identifier="bar")), + BaseRelation(path=Path(database="dbt", schema="FOO", identifier="baz")), + BaseRelation(path=Path(database="dbt", schema=None, identifier="bar")), + BaseRelation(path=Path(database="dbt", schema="quux", identifier="bar")), + BaseRelation(path=Path(database="dbt", schema="skip", identifier="bar")), + ] + rows = list(map(lambda x: dataclasses.astuple(x.path), relations)) + mock_execute.return_value = agate.Table(rows=rows, column_names=column_names) + + mock_get_relations.return_value = relations + + relation_configs = [] + used_schemas = {("dbt", "foo"), ("dbt", "quux")} + + set_invocation_context({}) + if filtered: + catalog, exceptions = self.adapter.get_filtered_catalog( + relation_configs, used_schemas, set([relations[0], relations[3]]) + ) + else: + catalog, exceptions = self.adapter.get_catalog(relation_configs, used_schemas) + + tupled_catalog = set(map(tuple, catalog)) + if filtered: + self.assertEqual(tupled_catalog, {rows[0], rows[3]}) + else: + self.assertEqual(tupled_catalog, {rows[0], rows[1], rows[3]}) + + self.assertEqual(exceptions, []) From 086fa8b9d4ea610f6e0063b8c53e330afa8ef7b4 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 20 Feb 2024 11:59:54 -0800 Subject: [PATCH 033/114] Update test requirements in pyproject.toml via bash script (#15) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .github/scripts/update_dev_packages.sh | 17 +++++++++++++ .github/workflows/code-quality.yml | 26 +++++++++++++++++++- .github/workflows/integration-tests.yml | 32 +++++++++++++++++++++++++ 3 files changed, 74 insertions(+), 1 deletion(-) create mode 100755 .github/scripts/update_dev_packages.sh diff --git a/.github/scripts/update_dev_packages.sh b/.github/scripts/update_dev_packages.sh new file mode 100755 index 000000000..c0f207b4e --- /dev/null +++ b/.github/scripts/update_dev_packages.sh @@ -0,0 +1,17 @@ +#!/bin/bash -e +set -e + + +adapters_git_branch=$1 +core_git_branch=$2 +target_req_file="pyproject.toml" +core_req_sed_pattern="s|dbt-core.git.*#subdirectory=core|dbt-core.git@${core_git_branch}#subdirectory=core|g" +adapters_req_sed_pattern="s|dbt-adapters.git|dbt-adapters.git@${adapters_git_branch}|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$adapters_req_sed_pattern" $target_req_file +else + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$adapters_req_sed_pattern" $target_req_file +fi diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index dedea94ba..081079fc3 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -7,6 +7,19 @@ on: - "*.latest" pull_request: workflow_dispatch: + inputs: + dbt_adapters_branch: + description: "The branch of dbt-adapters to evaluate" + type: string + required: true + default: "main" + workflow_call: + inputs: + dbt_adapters_branch: + description: "The branch of dbt-adapters to evaluate" + type: string + required: true + default: "main" permissions: read-all @@ -14,18 +27,29 @@ permissions: read-all concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} cancel-in-progress: true - +env: + # set DBT_ADAPTERS_BRANCH to the input value if the event is a workflow_dispatch (workflow_call uses the same event_name), + # otherwise use 'main' + DBT_ADAPTERS_BRANCH: ${{ github.event_name == 'workflow_dispatch' && inputs.dbt_adapters_branch || 'main' }} jobs: code-quality: name: Code Quality runs-on: ubuntu-latest + steps: - name: Check out repository uses: actions/checkout@v4 with: persist-credentials: false + - name: Update Adapters and Core branches + shell: bash + run: | + ./.github/scripts/update_dev_packages.sh \ + $DBT_ADAPTERS_BRANCH \ + "main" + - name: Setup `hatch` uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index af2383bd0..874001c48 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -7,6 +7,30 @@ on: - "*.latest" pull_request: workflow_dispatch: + inputs: + dbt_adapters_branch: + description: "The branch of dbt-adapters to use" + type: string + required: false + default: "main" + core_branch: + description: "The branch of dbt-core to use" + type: string + required: false + default: "main" + workflow_call: + inputs: + dbt_adapters_branch: + description: "The branch of dbt-adapters to use" + type: string + required: false + default: "main" + core_branch: + description: "The branch of dbt-core to use" + type: string + required: false + default: "main" + permissions: read-all @@ -42,6 +66,14 @@ jobs: - name: Check out repository uses: actions/checkout@v4 + - name: Update Adapters and Core branches + if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch'}} + shell: bash + run: | + ./.github/scripts/update_dev_packages.sh \ + ${{ github.event.inputs.dbt_adapters_branch }} \ + ${{ github.event.inputs.core_branch }} + - name: Setup postgres shell: bash run: psql -f ./scripts/setup_test_database.sql From 57d2e7046e33f077add42e0121ca6fd5cecc8d8c Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:46:59 -0500 Subject: [PATCH 034/114] Setup shared tests (#18) --- pyproject.toml | 23 +- test.env.example | 4 + tests/functional/shared_tests/__init__.py | 1 + tests/functional/shared_tests/seed_bom.csv | 501 ++++++++++++++++++ tests/functional/shared_tests/test_aliases.py | 22 + tests/functional/shared_tests/test_basic.py | 79 +++ tests/functional/shared_tests/test_caching.py | 22 + tests/functional/shared_tests/test_clone.py | 20 + .../shared_tests/test_column_types.py | 5 + .../shared_tests/test_concurrency.py | 5 + .../shared_tests/test_constraints.py | 64 +++ .../shared_tests/test_data_types.py | 35 ++ tests/functional/shared_tests/test_debug.py | 12 + tests/functional/shared_tests/test_empty.py | 5 + .../functional/shared_tests/test_ephemeral.py | 17 + tests/functional/shared_tests/test_grants.py | 25 + tests/functional/shared_tests/test_hooks.py | 76 +++ .../shared_tests/test_incremental.py | 24 + .../shared_tests/test_persist_docs.py | 17 + .../shared_tests/test_query_comment.py | 32 ++ .../functional/shared_tests/test_relations.py | 10 + tests/functional/shared_tests/test_show.py | 12 + .../shared_tests/test_simple_copy.py | 17 + .../shared_tests/test_simple_seed.py | 69 +++ .../shared_tests/test_simple_snapshot.py | 12 + .../shared_tests/test_store_test_failures.py | 7 + .../shared_tests/test_unit_testing.py | 15 + tests/functional/shared_tests/test_utils.py | 174 ++++++ 28 files changed, 1300 insertions(+), 5 deletions(-) create mode 100644 tests/functional/shared_tests/__init__.py create mode 100644 tests/functional/shared_tests/seed_bom.csv create mode 100644 tests/functional/shared_tests/test_aliases.py create mode 100644 tests/functional/shared_tests/test_basic.py create mode 100644 tests/functional/shared_tests/test_caching.py create mode 100644 tests/functional/shared_tests/test_clone.py create mode 100644 tests/functional/shared_tests/test_column_types.py create mode 100644 tests/functional/shared_tests/test_concurrency.py create mode 100644 tests/functional/shared_tests/test_constraints.py create mode 100644 tests/functional/shared_tests/test_data_types.py create mode 100644 tests/functional/shared_tests/test_debug.py create mode 100644 tests/functional/shared_tests/test_empty.py create mode 100644 tests/functional/shared_tests/test_ephemeral.py create mode 100644 tests/functional/shared_tests/test_grants.py create mode 100644 tests/functional/shared_tests/test_hooks.py create mode 100644 tests/functional/shared_tests/test_incremental.py create mode 100644 tests/functional/shared_tests/test_persist_docs.py create mode 100644 tests/functional/shared_tests/test_query_comment.py create mode 100644 tests/functional/shared_tests/test_relations.py create mode 100644 tests/functional/shared_tests/test_show.py create mode 100644 tests/functional/shared_tests/test_simple_copy.py create mode 100644 tests/functional/shared_tests/test_simple_seed.py create mode 100644 tests/functional/shared_tests/test_simple_snapshot.py create mode 100644 tests/functional/shared_tests/test_store_test_failures.py create mode 100644 tests/functional/shared_tests/test_unit_testing.py create mode 100644 tests/functional/shared_tests/test_utils.py diff --git a/pyproject.toml b/pyproject.toml index 71ef119c8..6b58ab143 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,7 @@ path = "dbt/adapters/postgres/__version__.py" [tool.hatch.envs.default] dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", - "dbt_common @ git+https://github.com/dbt-labs/dbt-common.git", + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", ] [tool.hatch.envs.lint] @@ -81,22 +81,34 @@ all = "python -m mypy ." [tool.hatch.envs.unit-tests] dependencies = [ - # TODO: remove `dbt-core` dependencies from unit tests - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", "freezegun", "pytest", "pytest-dotenv", "pytest-mock", "pytest-xdist", ] +extra-dependencies = [ + # TODO: remove `dbt-core` dependencies from unit tests + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", +] [tool.hatch.envs.unit-tests.scripts] all = "python -m pytest {args:tests/unit}" [tool.hatch.envs.integration-tests] template = "unit-tests" extra-dependencies = [ + # TODO: remove `dbt-core` dependencies from integration tests + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", ] +[tool.hatch.envs.integration-tests.env-vars] +DBT_TEST_USER_1 = "dbt_test_user_1" +DBT_TEST_USER_2 = "dbt_test_user_2" +DBT_TEST_USER_3 = "dbt_test_user_3" [tool.hatch.envs.integration-tests.scripts] all = "python -m pytest {args:tests/functional}" @@ -130,7 +142,7 @@ target-version = ['py38'] [tool.flake8] select = ["E", "W", "F"] ignore = ["E203", "E501", "E741", "W503", "W504"] -exclude = ["tests", "venv"] +exclude = ["tests", "venv", ".hatch_venvs"] per-file-ignores = ["*/__init__.py: F401"] [tool.mypy] @@ -145,7 +157,8 @@ files = [ ] exclude = [ "tests/functional", - "venv" + "venv", + ".hatch_venvs", ] [tool.pytest] diff --git a/test.env.example b/test.env.example index b4d04412d..1f3da25c4 100644 --- a/test.env.example +++ b/test.env.example @@ -4,3 +4,7 @@ POSTGRES_TEST_USER= POSTGRES_TEST_PASS= POSTGRES_TEST_DATABASE= POSTGRES_TEST_THREADS= + +DBT_TEST_USER_1=dbt_test_user_1 +DBT_TEST_USER_2=dbt_test_user_2 +DBT_TEST_USER_3=dbt_test_user_3 diff --git a/tests/functional/shared_tests/__init__.py b/tests/functional/shared_tests/__init__.py new file mode 100644 index 000000000..0dad1cca5 --- /dev/null +++ b/tests/functional/shared_tests/__init__.py @@ -0,0 +1 @@ +# this file namespaces the test files within to avoid naming collision for the test collector diff --git a/tests/functional/shared_tests/seed_bom.csv b/tests/functional/shared_tests/seed_bom.csv new file mode 100644 index 000000000..a040382e7 --- /dev/null +++ b/tests/functional/shared_tests/seed_bom.csv @@ -0,0 +1,501 @@ +seed_id,first_name,email,ip_address,birthday +1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 +2,Larry,lperkins1@toplist.cz,64.210.133.162,1978-05-09 04:15:14 +3,Anna,amontgomery2@miitbeian.gov.cn,168.104.64.114,2011-10-16 04:07:57 +4,Sandra,sgeorge3@livejournal.com,229.235.252.98,1973-07-19 10:52:43 +5,Fred,fwoods4@google.cn,78.229.170.124,2012-09-30 16:38:29 +6,Stephen,shanson5@livejournal.com,182.227.157.105,1995-11-07 21:40:50 +7,William,wmartinez6@upenn.edu,135.139.249.50,1982-09-05 03:11:59 +8,Jessica,jlong7@hao123.com,203.62.178.210,1991-10-16 11:03:15 +9,Douglas,dwhite8@tamu.edu,178.187.247.1,1979-10-01 09:49:48 +10,Lisa,lcoleman9@nydailynews.com,168.234.128.249,2011-05-26 07:45:49 +11,Ralph,rfieldsa@home.pl,55.152.163.149,1972-11-18 19:06:11 +12,Louise,lnicholsb@samsung.com,141.116.153.154,2014-11-25 20:56:14 +13,Clarence,cduncanc@sfgate.com,81.171.31.133,2011-11-17 07:02:36 +14,Daniel,dfranklind@omniture.com,8.204.211.37,1980-09-13 00:09:04 +15,Katherine,klanee@auda.org.au,176.96.134.59,1997-08-22 19:36:56 +16,Billy,bwardf@wikia.com,214.108.78.85,2003-10-19 02:14:47 +17,Annie,agarzag@ocn.ne.jp,190.108.42.70,1988-10-28 15:12:35 +18,Shirley,scolemanh@fastcompany.com,109.251.164.84,1988-08-24 10:50:57 +19,Roger,rfrazieri@scribd.com,38.145.218.108,1985-12-31 15:17:15 +20,Lillian,lstanleyj@goodreads.com,47.57.236.17,1970-06-08 02:09:05 +21,Aaron,arodriguezk@nps.gov,205.245.118.221,1985-10-11 23:07:49 +22,Patrick,pparkerl@techcrunch.com,19.8.100.182,2006-03-29 12:53:56 +23,Phillip,pmorenom@intel.com,41.38.254.103,2011-11-07 15:35:43 +24,Henry,hgarcian@newsvine.com,1.191.216.252,2008-08-28 08:30:44 +25,Irene,iturnero@opera.com,50.17.60.190,1994-04-01 07:15:02 +26,Andrew,adunnp@pen.io,123.52.253.176,2000-11-01 06:03:25 +27,David,dgutierrezq@wp.com,238.23.203.42,1988-01-25 07:29:18 +28,Henry,hsanchezr@cyberchimps.com,248.102.2.185,1983-01-01 13:36:37 +29,Evelyn,epetersons@gizmodo.com,32.80.46.119,1979-07-16 17:24:12 +30,Tammy,tmitchellt@purevolume.com,249.246.167.88,2001-04-03 10:00:23 +31,Jacqueline,jlittleu@domainmarket.com,127.181.97.47,1986-02-11 21:35:50 +32,Earl,eortizv@opera.com,166.47.248.240,1996-07-06 08:16:27 +33,Juan,jgordonw@sciencedirect.com,71.77.2.200,1987-01-31 03:46:44 +34,Diane,dhowellx@nyu.edu,140.94.133.12,1994-06-11 02:30:05 +35,Randy,rkennedyy@microsoft.com,73.255.34.196,2005-05-26 20:28:39 +36,Janice,jriveraz@time.com,22.214.227.32,1990-02-09 04:16:52 +37,Laura,lperry10@diigo.com,159.148.145.73,2015-03-17 05:59:25 +38,Gary,gray11@statcounter.com,40.193.124.56,1970-01-27 10:04:51 +39,Jesse,jmcdonald12@typepad.com,31.7.86.103,2009-03-14 08:14:29 +40,Sandra,sgonzalez13@goodreads.com,223.80.168.239,1993-05-21 14:08:54 +41,Scott,smoore14@archive.org,38.238.46.83,1980-08-30 11:16:56 +42,Phillip,pevans15@cisco.com,158.234.59.34,2011-12-15 23:26:31 +43,Steven,sriley16@google.ca,90.247.57.68,2011-10-29 19:03:28 +44,Deborah,dbrown17@hexun.com,179.125.143.240,1995-04-10 14:36:07 +45,Lori,lross18@ow.ly,64.80.162.180,1980-12-27 16:49:15 +46,Sean,sjackson19@tumblr.com,240.116.183.69,1988-06-12 21:24:45 +47,Terry,tbarnes1a@163.com,118.38.213.137,1997-09-22 16:43:19 +48,Dorothy,dross1b@ebay.com,116.81.76.49,2005-02-28 13:33:24 +49,Samuel,swashington1c@house.gov,38.191.253.40,1989-01-19 21:15:48 +50,Ralph,rcarter1d@tinyurl.com,104.84.60.174,2007-08-11 10:21:49 +51,Wayne,whudson1e@princeton.edu,90.61.24.102,1983-07-03 16:58:12 +52,Rose,rjames1f@plala.or.jp,240.83.81.10,1995-06-08 11:46:23 +53,Louise,lcox1g@theglobeandmail.com,105.11.82.145,2016-09-19 14:45:51 +54,Kenneth,kjohnson1h@independent.co.uk,139.5.45.94,1976-08-17 11:26:19 +55,Donna,dbrown1i@amazon.co.uk,19.45.169.45,2006-05-27 16:51:40 +56,Johnny,jvasquez1j@trellian.com,118.202.238.23,1975-11-17 08:42:32 +57,Patrick,pramirez1k@tamu.edu,231.25.153.198,1997-08-06 11:51:09 +58,Helen,hlarson1l@prweb.com,8.40.21.39,1993-08-04 19:53:40 +59,Patricia,pspencer1m@gmpg.org,212.198.40.15,1977-08-03 16:37:27 +60,Joseph,jspencer1n@marriott.com,13.15.63.238,2005-07-23 20:22:06 +61,Phillip,pschmidt1o@blogtalkradio.com,177.98.201.190,1976-05-19 21:47:44 +62,Joan,jwebb1p@google.ru,105.229.170.71,1972-09-07 17:53:47 +63,Phyllis,pkennedy1q@imgur.com,35.145.8.244,2000-01-01 22:33:37 +64,Katherine,khunter1r@smh.com.au,248.168.205.32,1991-01-09 06:40:24 +65,Laura,lvasquez1s@wiley.com,128.129.115.152,1997-10-23 12:04:56 +66,Juan,jdunn1t@state.gov,44.228.124.51,2004-11-10 05:07:35 +67,Judith,jholmes1u@wiley.com,40.227.179.115,1977-08-02 17:01:45 +68,Beverly,bbaker1v@wufoo.com,208.34.84.59,2016-03-06 20:07:23 +69,Lawrence,lcarr1w@flickr.com,59.158.212.223,1988-09-13 06:07:21 +70,Gloria,gwilliams1x@mtv.com,245.231.88.33,1995-03-18 22:32:46 +71,Steven,ssims1y@cbslocal.com,104.50.58.255,2001-08-05 21:26:20 +72,Betty,bmills1z@arstechnica.com,103.177.214.220,1981-12-14 21:26:54 +73,Mildred,mfuller20@prnewswire.com,151.158.8.130,2000-04-19 10:13:55 +74,Donald,dday21@icq.com,9.178.102.255,1972-12-03 00:58:24 +75,Eric,ethomas22@addtoany.com,85.2.241.227,1992-11-01 05:59:30 +76,Joyce,jarmstrong23@sitemeter.com,169.224.20.36,1985-10-24 06:50:01 +77,Maria,mmartinez24@amazonaws.com,143.189.167.135,2005-10-05 05:17:42 +78,Harry,hburton25@youtube.com,156.47.176.237,1978-03-26 05:53:33 +79,Kevin,klawrence26@hao123.com,79.136.183.83,1994-10-12 04:38:52 +80,David,dhall27@prweb.com,133.149.172.153,1976-12-15 16:24:24 +81,Kathy,kperry28@twitter.com,229.242.72.228,1979-03-04 02:58:56 +82,Adam,aprice29@elegantthemes.com,13.145.21.10,1982-11-07 11:46:59 +83,Brandon,bgriffin2a@va.gov,73.249.128.212,2013-10-30 05:30:36 +84,Henry,hnguyen2b@discovery.com,211.36.214.242,1985-01-09 06:37:27 +85,Eric,esanchez2c@edublogs.org,191.166.188.251,2004-05-01 23:21:42 +86,Jason,jlee2d@jimdo.com,193.92.16.182,1973-01-08 09:05:39 +87,Diana,drichards2e@istockphoto.com,19.130.175.245,1994-10-05 22:50:49 +88,Andrea,awelch2f@abc.net.au,94.155.233.96,2002-04-26 08:41:44 +89,Louis,lwagner2g@miitbeian.gov.cn,26.217.34.111,2003-08-25 07:56:39 +90,Jane,jsims2h@seesaa.net,43.4.220.135,1987-03-20 20:39:04 +91,Larry,lgrant2i@si.edu,97.126.79.34,2000-09-07 20:26:19 +92,Louis,ldean2j@prnewswire.com,37.148.40.127,2011-09-16 20:12:14 +93,Jennifer,jcampbell2k@xing.com,38.106.254.142,1988-07-15 05:06:49 +94,Wayne,wcunningham2l@google.com.hk,223.28.26.187,2009-12-15 06:16:54 +95,Lori,lstevens2m@icq.com,181.250.181.58,1984-10-28 03:29:19 +96,Judy,jsimpson2n@marriott.com,180.121.239.219,1986-02-07 15:18:10 +97,Phillip,phoward2o@usa.gov,255.247.0.175,2002-12-26 08:44:45 +98,Gloria,gwalker2p@usa.gov,156.140.7.128,1997-10-04 07:58:58 +99,Paul,pjohnson2q@umn.edu,183.59.198.197,1991-11-14 12:33:55 +100,Frank,fgreene2r@blogspot.com,150.143.68.121,2010-06-12 23:55:39 +101,Deborah,dknight2s@reverbnation.com,222.131.211.191,1970-07-08 08:54:23 +102,Sandra,sblack2t@tripadvisor.com,254.183.128.254,2000-04-12 02:39:36 +103,Edward,eburns2u@dailymotion.com,253.89.118.18,1993-10-10 10:54:01 +104,Anthony,ayoung2v@ustream.tv,118.4.193.176,1978-08-26 17:07:29 +105,Donald,dlawrence2w@wp.com,139.200.159.227,2007-07-21 20:56:20 +106,Matthew,mfreeman2x@google.fr,205.26.239.92,2014-12-05 17:05:39 +107,Sean,ssanders2y@trellian.com,143.89.82.108,1993-07-14 21:45:02 +108,Sharon,srobinson2z@soundcloud.com,66.234.247.54,1977-04-06 19:07:03 +109,Jennifer,jwatson30@t-online.de,196.102.127.7,1998-03-07 05:12:23 +110,Clarence,cbrooks31@si.edu,218.93.234.73,2002-11-06 17:22:25 +111,Jose,jflores32@goo.gl,185.105.244.231,1995-01-05 06:32:21 +112,George,glee33@adobe.com,173.82.249.196,2015-01-04 02:47:46 +113,Larry,lhill34@linkedin.com,66.5.206.195,2010-11-02 10:21:17 +114,Marie,mmeyer35@mysql.com,151.152.88.107,1990-05-22 20:52:51 +115,Clarence,cwebb36@skype.com,130.198.55.217,1972-10-27 07:38:54 +116,Sarah,scarter37@answers.com,80.89.18.153,1971-08-24 19:29:30 +117,Henry,hhughes38@webeden.co.uk,152.60.114.174,1973-01-27 09:00:42 +118,Teresa,thenry39@hao123.com,32.187.239.106,2015-11-06 01:48:44 +119,Billy,bgutierrez3a@sun.com,52.37.70.134,2002-03-19 03:20:19 +120,Anthony,agibson3b@github.io,154.251.232.213,1991-04-19 01:08:15 +121,Sandra,sromero3c@wikia.com,44.124.171.2,1998-09-06 20:30:34 +122,Paula,pandrews3d@blogs.com,153.142.118.226,2003-06-24 16:31:24 +123,Terry,tbaker3e@csmonitor.com,99.120.45.219,1970-12-09 23:57:21 +124,Lois,lwilson3f@reuters.com,147.44.171.83,1971-01-09 22:28:51 +125,Sara,smorgan3g@nature.com,197.67.192.230,1992-01-28 20:33:24 +126,Charles,ctorres3h@china.com.cn,156.115.216.2,1993-10-02 19:36:34 +127,Richard,ralexander3i@marriott.com,248.235.180.59,1999-02-03 18:40:55 +128,Christina,charper3j@cocolog-nifty.com,152.114.116.129,1978-09-13 00:37:32 +129,Steve,sadams3k@economist.com,112.248.91.98,2004-03-21 09:07:43 +130,Katherine,krobertson3l@ow.ly,37.220.107.28,1977-03-18 19:28:50 +131,Donna,dgibson3m@state.gov,222.218.76.221,1999-02-01 06:46:16 +132,Christina,cwest3n@mlb.com,152.114.6.160,1979-12-24 15:30:35 +133,Sandra,swillis3o@meetup.com,180.71.49.34,1984-09-27 08:05:54 +134,Clarence,cedwards3p@smugmug.com,10.64.180.186,1979-04-16 16:52:10 +135,Ruby,rjames3q@wp.com,98.61.54.20,2007-01-13 14:25:52 +136,Sarah,smontgomery3r@tripod.com,91.45.164.172,2009-07-25 04:34:30 +137,Sarah,soliver3s@eventbrite.com,30.106.39.146,2012-05-09 22:12:33 +138,Deborah,dwheeler3t@biblegateway.com,59.105.213.173,1999-11-09 08:08:44 +139,Deborah,dray3u@i2i.jp,11.108.186.217,2014-02-04 03:15:19 +140,Paul,parmstrong3v@alexa.com,6.250.59.43,2009-12-21 10:08:53 +141,Aaron,abishop3w@opera.com,207.145.249.62,1996-04-25 23:20:23 +142,Henry,hsanders3x@google.ru,140.215.203.171,2012-01-29 11:52:32 +143,Anne,aanderson3y@1688.com,74.150.102.118,1982-04-03 13:46:17 +144,Victor,vmurphy3z@hugedomains.com,222.155.99.152,1987-11-03 19:58:41 +145,Evelyn,ereid40@pbs.org,249.122.33.117,1977-12-14 17:09:57 +146,Brian,bgonzalez41@wikia.com,246.254.235.141,1991-02-24 00:45:58 +147,Sandra,sgray42@squarespace.com,150.73.28.159,1972-07-28 17:26:32 +148,Alice,ajones43@a8.net,78.253.12.177,2002-12-05 16:57:46 +149,Jessica,jhanson44@mapquest.com,87.229.30.160,1994-01-30 11:40:04 +150,Louise,lbailey45@reuters.com,191.219.31.101,2011-09-07 21:11:45 +151,Christopher,cgonzalez46@printfriendly.com,83.137.213.239,1984-10-24 14:58:04 +152,Gregory,gcollins47@yandex.ru,28.176.10.115,1998-07-25 17:17:10 +153,Jane,jperkins48@usnews.com,46.53.164.159,1979-08-19 15:25:00 +154,Phyllis,plong49@yahoo.co.jp,208.140.88.2,1985-07-06 02:16:36 +155,Adam,acarter4a@scribd.com,78.48.148.204,2005-07-20 03:31:09 +156,Frank,fweaver4b@angelfire.com,199.180.255.224,2011-03-04 23:07:54 +157,Ronald,rmurphy4c@cloudflare.com,73.42.97.231,1991-01-11 10:39:41 +158,Richard,rmorris4d@e-recht24.de,91.9.97.223,2009-01-17 21:05:15 +159,Rose,rfoster4e@woothemes.com,203.169.53.16,1991-04-21 02:09:38 +160,George,ggarrett4f@uiuc.edu,186.61.5.167,1989-11-11 11:29:42 +161,Victor,vhamilton4g@biblegateway.com,121.229.138.38,2012-06-22 18:01:23 +162,Mark,mbennett4h@businessinsider.com,209.184.29.203,1980-04-16 15:26:34 +163,Martin,mwells4i@ifeng.com,97.223.55.105,2010-05-26 14:08:18 +164,Diana,dstone4j@google.ru,90.155.52.47,2013-02-11 00:14:54 +165,Walter,wferguson4k@blogger.com,30.63.212.44,1986-02-20 17:46:46 +166,Denise,dcoleman4l@vistaprint.com,10.209.153.77,1992-05-13 20:14:14 +167,Philip,pknight4m@xing.com,15.28.135.167,2000-09-11 18:41:13 +168,Russell,rcarr4n@youtube.com,113.55.165.50,2008-07-10 17:49:27 +169,Donna,dburke4o@dion.ne.jp,70.0.105.111,1992-02-10 17:24:58 +170,Anne,along4p@squidoo.com,36.154.58.107,2012-08-19 23:35:31 +171,Clarence,cbanks4q@webeden.co.uk,94.57.53.114,1972-03-11 21:46:44 +172,Betty,bbowman4r@cyberchimps.com,178.115.209.69,2013-01-13 21:34:51 +173,Andrew,ahudson4s@nytimes.com,84.32.252.144,1998-09-15 14:20:04 +174,Keith,kgordon4t@cam.ac.uk,189.237.211.102,2009-01-22 05:34:38 +175,Patrick,pwheeler4u@mysql.com,47.22.117.226,1984-09-05 22:33:15 +176,Jesse,jfoster4v@mapquest.com,229.95.131.46,1990-01-20 12:19:15 +177,Arthur,afisher4w@jugem.jp,107.255.244.98,1983-10-13 11:08:46 +178,Nicole,nryan4x@wsj.com,243.211.33.221,1974-05-30 23:19:14 +179,Bruce,bjohnson4y@sfgate.com,17.41.200.101,1992-09-23 02:02:19 +180,Terry,tcox4z@reference.com,20.189.120.106,1982-02-13 12:43:14 +181,Ashley,astanley50@kickstarter.com,86.3.56.98,1976-05-09 01:27:16 +182,Michael,mrivera51@about.me,72.118.249.0,1971-11-11 17:28:37 +183,Steven,sgonzalez52@mozilla.org,169.112.247.47,2002-08-24 14:59:25 +184,Kathleen,kfuller53@bloglovin.com,80.93.59.30,2002-03-11 13:41:29 +185,Nicole,nhenderson54@usda.gov,39.253.60.30,1995-04-24 05:55:07 +186,Ralph,rharper55@purevolume.com,167.147.142.189,1980-02-10 18:35:45 +187,Heather,hcunningham56@photobucket.com,96.222.196.229,2007-06-15 05:37:50 +188,Nancy,nlittle57@cbc.ca,241.53.255.175,2007-07-12 23:42:48 +189,Juan,jramirez58@pinterest.com,190.128.84.27,1978-11-07 23:37:37 +190,Beverly,bfowler59@chronoengine.com,54.144.230.49,1979-03-31 23:27:28 +191,Shirley,sstevens5a@prlog.org,200.97.231.248,2011-12-06 07:08:50 +192,Annie,areyes5b@squidoo.com,223.32.182.101,2011-05-28 02:42:09 +193,Jack,jkelley5c@tiny.cc,47.34.118.150,1981-12-05 17:31:40 +194,Keith,krobinson5d@1und1.de,170.210.209.31,1999-03-09 11:05:43 +195,Joseph,jmiller5e@google.com.au,136.74.212.139,1984-10-08 13:18:20 +196,Annie,aday5f@blogspot.com,71.99.186.69,1986-02-18 12:27:34 +197,Nancy,nperez5g@liveinternet.ru,28.160.6.107,1983-10-20 17:51:20 +198,Tammy,tward5h@ucoz.ru,141.43.164.70,1980-03-31 04:45:29 +199,Doris,dryan5i@ted.com,239.117.202.188,1985-07-03 03:17:53 +200,Rose,rmendoza5j@photobucket.com,150.200.206.79,1973-04-21 21:36:40 +201,Cynthia,cbutler5k@hubpages.com,80.153.174.161,2001-01-20 01:42:26 +202,Samuel,soliver5l@people.com.cn,86.127.246.140,1970-09-02 02:19:00 +203,Carl,csanchez5m@mysql.com,50.149.237.107,1993-12-01 07:02:09 +204,Kathryn,kowens5n@geocities.jp,145.166.205.201,2004-07-06 18:39:33 +205,Nicholas,nnichols5o@parallels.com,190.240.66.170,2014-11-11 18:52:19 +206,Keith,kwillis5p@youtube.com,181.43.206.100,1998-06-13 06:30:51 +207,Justin,jwebb5q@intel.com,211.54.245.74,2000-11-04 16:58:26 +208,Gary,ghicks5r@wikipedia.org,196.154.213.104,1992-12-01 19:48:28 +209,Martin,mpowell5s@flickr.com,153.67.12.241,1983-06-30 06:24:32 +210,Brenda,bkelley5t@xinhuanet.com,113.100.5.172,2005-01-08 20:50:22 +211,Edward,eray5u@a8.net,205.187.246.65,2011-09-26 08:04:44 +212,Steven,slawson5v@senate.gov,238.150.250.36,1978-11-22 02:48:09 +213,Robert,rthompson5w@furl.net,70.7.89.236,2001-09-12 08:52:07 +214,Jack,jporter5x@diigo.com,220.172.29.99,1976-07-26 14:29:21 +215,Lisa,ljenkins5y@oakley.com,150.151.170.180,2010-03-20 19:21:16 +216,Theresa,tbell5z@mayoclinic.com,247.25.53.173,2001-03-11 05:36:40 +217,Jimmy,jstephens60@weather.com,145.101.93.235,1983-04-12 09:35:30 +218,Louis,lhunt61@amazon.co.jp,78.137.6.253,1997-08-29 19:34:34 +219,Lawrence,lgilbert62@ted.com,243.132.8.78,2015-04-08 22:06:56 +220,David,dgardner63@4shared.com,204.40.46.136,1971-07-09 03:29:11 +221,Charles,ckennedy64@gmpg.org,211.83.233.2,2011-02-26 11:55:04 +222,Lillian,lbanks65@msu.edu,124.233.12.80,2010-05-16 20:29:02 +223,Ernest,enguyen66@baidu.com,82.45.128.148,1996-07-04 10:07:04 +224,Ryan,rrussell67@cloudflare.com,202.53.240.223,1983-08-05 12:36:29 +225,Donald,ddavis68@ustream.tv,47.39.218.137,1989-05-27 02:30:56 +226,Joe,jscott69@blogspot.com,140.23.131.75,1973-03-16 12:21:31 +227,Anne,amarshall6a@google.ca,113.162.200.197,1988-12-09 03:38:29 +228,Willie,wturner6b@constantcontact.com,85.83.182.249,1991-10-06 01:51:10 +229,Nicole,nwilson6c@sogou.com,30.223.51.135,1977-05-29 19:54:56 +230,Janet,jwheeler6d@stumbleupon.com,153.194.27.144,2011-03-13 12:48:47 +231,Lois,lcarr6e@statcounter.com,0.41.36.53,1993-02-06 04:52:01 +232,Shirley,scruz6f@tmall.com,37.156.39.223,2007-02-18 17:47:01 +233,Patrick,pford6g@reverbnation.com,36.198.200.89,1977-03-06 15:47:24 +234,Lisa,lhudson6h@usatoday.com,134.213.58.137,2014-10-28 01:56:56 +235,Pamela,pmartinez6i@opensource.org,5.151.127.202,1987-11-30 16:44:47 +236,Larry,lperez6j@infoseek.co.jp,235.122.96.148,1979-01-18 06:33:45 +237,Pamela,pramirez6k@census.gov,138.233.34.163,2012-01-29 10:35:20 +238,Daniel,dcarr6l@php.net,146.21.152.242,1984-11-17 08:22:59 +239,Patrick,psmith6m@indiegogo.com,136.222.199.36,2001-05-30 22:16:44 +240,Raymond,rhenderson6n@hc360.com,116.31.112.38,2000-01-05 20:35:41 +241,Teresa,treynolds6o@miitbeian.gov.cn,198.126.205.220,1996-11-08 01:27:31 +242,Johnny,jmason6p@flickr.com,192.8.232.114,2013-05-14 05:35:50 +243,Angela,akelly6q@guardian.co.uk,234.116.60.197,1977-08-20 02:05:17 +244,Douglas,dcole6r@cmu.edu,128.135.212.69,2016-10-26 17:40:36 +245,Frances,fcampbell6s@twitpic.com,94.22.243.235,1987-04-26 07:07:13 +246,Donna,dgreen6t@chron.com,227.116.46.107,2011-07-25 12:59:54 +247,Benjamin,bfranklin6u@redcross.org,89.141.142.89,1974-05-03 20:28:18 +248,Randy,rpalmer6v@rambler.ru,70.173.63.178,2011-12-20 17:40:18 +249,Melissa,mmurray6w@bbb.org,114.234.118.137,1991-02-26 12:45:44 +250,Jean,jlittle6x@epa.gov,141.21.163.254,1991-08-16 04:57:09 +251,Daniel,dolson6y@nature.com,125.75.104.97,2010-04-23 06:25:54 +252,Kathryn,kwells6z@eventbrite.com,225.104.28.249,2015-01-31 02:21:50 +253,Theresa,tgonzalez70@ox.ac.uk,91.93.156.26,1971-12-11 10:31:31 +254,Beverly,broberts71@bluehost.com,244.40.158.89,2013-09-21 13:02:31 +255,Pamela,pmurray72@netscape.com,218.54.95.216,1985-04-16 00:34:00 +256,Timothy,trichardson73@amazonaws.com,235.49.24.229,2000-11-11 09:48:28 +257,Mildred,mpalmer74@is.gd,234.125.95.132,1992-05-25 02:25:02 +258,Jessica,jcampbell75@google.it,55.98.30.140,2014-08-26 00:26:34 +259,Beverly,bthomas76@cpanel.net,48.78.228.176,1970-08-18 10:40:05 +260,Eugene,eward77@cargocollective.com,139.226.204.2,1996-12-04 23:17:00 +261,Andrea,aallen78@webnode.com,160.31.214.38,2009-07-06 07:22:37 +262,Justin,jruiz79@merriam-webster.com,150.149.246.122,2005-06-06 11:44:19 +263,Kenneth,kedwards7a@networksolutions.com,98.82.193.128,2001-07-03 02:00:10 +264,Rachel,rday7b@miibeian.gov.cn,114.15.247.221,1994-08-18 19:45:40 +265,Russell,rmiller7c@instagram.com,184.130.152.253,1977-11-06 01:58:12 +266,Bonnie,bhudson7d@cornell.edu,235.180.186.206,1990-12-03 22:45:24 +267,Raymond,rknight7e@yandex.ru,161.2.44.252,1995-08-25 04:31:19 +268,Bonnie,brussell7f@elpais.com,199.237.57.207,1991-03-29 08:32:06 +269,Marie,mhenderson7g@elpais.com,52.203.131.144,2004-06-04 21:50:28 +270,Alan,acarr7h@trellian.com,147.51.205.72,2005-03-03 10:51:31 +271,Barbara,bturner7i@hugedomains.com,103.160.110.226,2004-08-04 13:42:40 +272,Christina,cdaniels7j@census.gov,0.238.61.251,1972-10-18 12:47:33 +273,Jeremy,jgomez7k@reuters.com,111.26.65.56,2013-01-13 10:41:35 +274,Laura,lwood7l@icio.us,149.153.38.205,2011-06-25 09:33:59 +275,Matthew,mbowman7m@auda.org.au,182.138.206.172,1999-03-05 03:25:36 +276,Denise,dparker7n@icq.com,0.213.88.138,2011-11-04 09:43:06 +277,Phillip,pparker7o@discuz.net,219.242.165.240,1973-10-19 04:22:29 +278,Joan,jpierce7p@salon.com,63.31.213.202,1989-04-09 22:06:24 +279,Irene,ibaker7q@cbc.ca,102.33.235.114,1992-09-04 13:00:57 +280,Betty,bbowman7r@ted.com,170.91.249.242,2015-09-28 08:14:22 +281,Teresa,truiz7s@boston.com,82.108.158.207,1999-07-18 05:17:09 +282,Helen,hbrooks7t@slideshare.net,102.87.162.187,2003-01-06 15:45:29 +283,Karen,kgriffin7u@wunderground.com,43.82.44.184,2010-05-28 01:56:37 +284,Lisa,lfernandez7v@mtv.com,200.238.218.220,1993-04-03 20:33:51 +285,Jesse,jlawrence7w@timesonline.co.uk,95.122.105.78,1990-01-05 17:28:43 +286,Terry,tross7x@macromedia.com,29.112.114.133,2009-08-29 21:32:17 +287,Angela,abradley7y@icq.com,177.44.27.72,1989-10-04 21:46:06 +288,Maria,mhart7z@dailymotion.com,55.27.55.202,1975-01-21 01:22:57 +289,Raymond,randrews80@pinterest.com,88.90.78.67,1992-03-16 21:37:40 +290,Kathy,krice81@bluehost.com,212.63.196.102,2000-12-14 03:06:44 +291,Cynthia,cramos82@nymag.com,107.89.190.6,2005-06-28 02:02:33 +292,Kimberly,kjones83@mysql.com,86.169.101.101,2007-06-13 22:56:49 +293,Timothy,thansen84@microsoft.com,108.100.254.90,2003-04-04 10:31:57 +294,Carol,cspencer85@berkeley.edu,75.118.144.187,1999-03-30 14:53:21 +295,Louis,lmedina86@latimes.com,141.147.163.24,1991-04-11 17:53:13 +296,Margaret,mcole87@google.fr,53.184.26.83,1991-12-19 01:54:10 +297,Mary,mgomez88@yellowpages.com,208.56.57.99,1976-05-21 18:05:08 +298,Amanda,aanderson89@geocities.com,147.73.15.252,1987-08-22 15:05:28 +299,Kathryn,kgarrett8a@nature.com,27.29.177.220,1976-07-15 04:25:04 +300,Dorothy,dmason8b@shareasale.com,106.210.99.193,1990-09-03 21:39:31 +301,Lois,lkennedy8c@amazon.de,194.169.29.187,2007-07-29 14:09:31 +302,Irene,iburton8d@washingtonpost.com,196.143.110.249,2013-09-05 11:32:46 +303,Betty,belliott8e@wired.com,183.105.222.199,1979-09-19 19:29:13 +304,Bobby,bmeyer8f@census.gov,36.13.161.145,2014-05-24 14:34:39 +305,Ann,amorrison8g@sfgate.com,72.154.54.137,1978-10-05 14:22:34 +306,Daniel,djackson8h@wunderground.com,144.95.32.34,1990-07-27 13:23:05 +307,Joe,jboyd8i@alibaba.com,187.105.86.178,2011-09-28 16:46:32 +308,Ralph,rdunn8j@fc2.com,3.19.87.255,1984-10-18 08:00:40 +309,Craig,ccarter8k@gizmodo.com,235.152.76.215,1998-07-04 12:15:21 +310,Paula,pdean8l@hhs.gov,161.100.173.197,1973-02-13 09:38:55 +311,Andrew,agarrett8m@behance.net,199.253.123.218,1991-02-14 13:36:32 +312,Janet,jhowell8n@alexa.com,39.189.139.79,2012-11-24 20:17:33 +313,Keith,khansen8o@godaddy.com,116.186.223.196,1987-08-23 21:22:05 +314,Nicholas,nedwards8p@state.gov,142.175.142.11,1977-03-28 18:27:27 +315,Jacqueline,jallen8q@oaic.gov.au,189.66.135.192,1994-10-26 11:44:26 +316,Frank,fgardner8r@mapy.cz,154.77.119.169,1983-01-29 19:19:51 +317,Eric,eharrison8s@google.cn,245.139.65.123,1984-02-04 09:54:36 +318,Gregory,gcooper8t@go.com,171.147.0.221,2004-06-14 05:22:08 +319,Jean,jfreeman8u@rakuten.co.jp,67.243.121.5,1977-01-07 18:23:43 +320,Juan,jlewis8v@shinystat.com,216.181.171.189,2001-08-23 17:32:43 +321,Randy,rwilliams8w@shinystat.com,105.152.146.28,1983-02-17 00:05:50 +322,Stephen,shart8x@sciencedirect.com,196.131.205.148,2004-02-15 10:12:03 +323,Annie,ahunter8y@example.com,63.36.34.103,2003-07-23 21:15:25 +324,Melissa,mflores8z@cbc.ca,151.230.217.90,1983-11-02 14:53:56 +325,Jane,jweaver90@about.me,0.167.235.217,1987-07-29 00:13:44 +326,Anthony,asmith91@oracle.com,97.87.48.41,2001-05-31 18:44:11 +327,Terry,tdavis92@buzzfeed.com,46.20.12.51,2015-09-12 23:13:55 +328,Brandon,bmontgomery93@gravatar.com,252.101.48.186,2010-10-28 08:26:27 +329,Chris,cmurray94@bluehost.com,25.158.167.97,2004-05-05 16:10:31 +330,Denise,dfuller95@hugedomains.com,216.210.149.28,1979-04-20 08:57:24 +331,Arthur,amcdonald96@sakura.ne.jp,206.42.36.213,2009-08-15 03:26:16 +332,Jesse,jhoward97@google.cn,46.181.118.30,1974-04-18 14:08:41 +333,Frank,fsimpson98@domainmarket.com,163.220.211.87,2006-06-30 14:46:52 +334,Janice,jwoods99@pen.io,229.245.237.182,1988-04-06 11:52:58 +335,Rebecca,rroberts9a@huffingtonpost.com,148.96.15.80,1976-10-05 08:44:16 +336,Joshua,jray9b@opensource.org,192.253.12.198,1971-12-25 22:27:07 +337,Joyce,jcarpenter9c@statcounter.com,125.171.46.215,2001-12-31 22:08:13 +338,Andrea,awest9d@privacy.gov.au,79.101.180.201,1983-02-18 20:07:47 +339,Christine,chudson9e@yelp.com,64.198.43.56,1997-09-08 08:03:43 +340,Joe,jparker9f@earthlink.net,251.215.148.153,1973-11-04 05:08:18 +341,Thomas,tkim9g@answers.com,49.187.34.47,1991-08-07 21:13:48 +342,Janice,jdean9h@scientificamerican.com,4.197.117.16,2009-12-08 02:35:49 +343,James,jmitchell9i@umich.edu,43.121.18.147,2011-04-28 17:04:09 +344,Charles,cgardner9j@purevolume.com,197.78.240.240,1998-02-11 06:47:07 +345,Robert,rhenderson9k@friendfeed.com,215.84.180.88,2002-05-10 15:33:14 +346,Chris,cgray9l@4shared.com,249.70.192.240,1998-10-03 16:43:42 +347,Gloria,ghayes9m@hibu.com,81.103.138.26,1999-12-26 11:23:13 +348,Edward,eramirez9n@shareasale.com,38.136.90.136,2010-08-19 08:01:06 +349,Cheryl,cbutler9o@google.ca,172.180.78.172,1995-05-27 20:03:52 +350,Margaret,mwatkins9p@sfgate.com,3.20.198.6,2014-10-21 01:42:58 +351,Rebecca,rwelch9q@examiner.com,45.81.42.208,2001-02-08 12:19:06 +352,Joe,jpalmer9r@phpbb.com,163.202.92.190,1970-01-05 11:29:12 +353,Sandra,slewis9s@dyndns.org,77.215.201.236,1974-01-05 07:04:04 +354,Todd,tfranklin9t@g.co,167.125.181.82,2009-09-28 10:13:58 +355,Joseph,jlewis9u@webmd.com,244.204.6.11,1990-10-21 15:49:57 +356,Alan,aknight9v@nydailynews.com,152.197.95.83,1996-03-08 08:43:17 +357,Sharon,sdean9w@123-reg.co.uk,237.46.40.26,1985-11-30 12:09:24 +358,Annie,awright9x@cafepress.com,190.45.231.111,2000-08-24 11:56:06 +359,Diane,dhamilton9y@youtube.com,85.146.171.196,2015-02-24 02:03:57 +360,Antonio,alane9z@auda.org.au,61.63.146.203,2001-05-13 03:43:34 +361,Matthew,mallena0@hhs.gov,29.97.32.19,1973-02-19 23:43:32 +362,Bonnie,bfowlera1@soup.io,251.216.99.53,2013-08-01 15:35:41 +363,Margaret,mgraya2@examiner.com,69.255.151.79,1998-01-23 22:24:59 +364,Joan,jwagnera3@printfriendly.com,192.166.120.61,1973-07-13 00:30:22 +365,Catherine,cperkinsa4@nytimes.com,58.21.24.214,2006-11-19 11:52:26 +366,Mark,mcartera5@cpanel.net,220.33.102.142,2007-09-09 09:43:27 +367,Paula,ppricea6@msn.com,36.182.238.124,2009-11-11 09:13:05 +368,Catherine,cgreena7@army.mil,228.203.58.19,2005-08-09 16:52:15 +369,Helen,hhamiltona8@symantec.com,155.56.194.99,2005-02-01 05:40:36 +370,Jane,jmeyera9@ezinearticles.com,133.244.113.213,2013-11-06 22:10:23 +371,Wanda,wevansaa@bloglovin.com,233.125.192.48,1994-12-26 23:43:42 +372,Mark,mmarshallab@tumblr.com,114.74.60.47,2016-09-29 18:03:01 +373,Andrew,amartinezac@google.cn,182.54.37.130,1976-06-06 17:04:17 +374,Helen,hmoralesad@e-recht24.de,42.45.4.123,1977-03-28 19:06:59 +375,Bonnie,bstoneae@php.net,196.149.79.137,1970-02-05 17:05:58 +376,Douglas,dfreemanaf@nasa.gov,215.65.124.218,2008-11-20 21:51:55 +377,Willie,wwestag@army.mil,35.189.92.118,1992-07-24 05:08:08 +378,Cheryl,cwagnerah@upenn.edu,228.239.222.141,2010-01-25 06:29:01 +379,Sandra,swardai@baidu.com,63.11.113.240,1985-05-23 08:07:37 +380,Julie,jrobinsonaj@jugem.jp,110.58.202.50,2015-03-05 09:42:07 +381,Larry,lwagnerak@shop-pro.jp,98.234.25.24,1975-07-22 22:22:02 +382,Juan,jcastilloal@yelp.com,24.174.74.202,2007-01-17 09:32:43 +383,Donna,dfrazieram@artisteer.com,205.26.147.45,1990-02-11 20:55:46 +384,Rachel,rfloresan@w3.org,109.60.216.162,1983-05-22 22:42:18 +385,Robert,rreynoldsao@theguardian.com,122.65.209.130,2009-05-01 18:02:51 +386,Donald,dbradleyap@etsy.com,42.54.35.126,1997-01-16 16:31:52 +387,Rachel,rfisheraq@nih.gov,160.243.250.45,2006-02-17 22:05:49 +388,Nicholas,nhamiltonar@princeton.edu,156.211.37.111,1976-06-21 03:36:29 +389,Timothy,twhiteas@ca.gov,36.128.23.70,1975-09-24 03:51:18 +390,Diana,dbradleyat@odnoklassniki.ru,44.102.120.184,1983-04-27 09:02:50 +391,Billy,bfowlerau@jimdo.com,91.200.68.196,1995-01-29 06:57:35 +392,Bruce,bandrewsav@ucoz.com,48.12.101.125,1992-10-27 04:31:39 +393,Linda,lromeroaw@usa.gov,100.71.233.19,1992-06-08 15:13:18 +394,Debra,dwatkinsax@ucoz.ru,52.160.233.193,2001-11-11 06:51:01 +395,Katherine,kburkeay@wix.com,151.156.242.141,2010-06-14 19:54:28 +396,Martha,mharrisonaz@youku.com,21.222.10.199,1989-10-16 14:17:55 +397,Dennis,dwellsb0@youtu.be,103.16.29.3,1985-12-21 06:05:51 +398,Gloria,grichardsb1@bloglines.com,90.147.120.234,1982-08-27 01:04:43 +399,Brenda,bfullerb2@t.co,33.253.63.90,2011-04-20 05:00:35 +400,Larry,lhendersonb3@disqus.com,88.95.132.128,1982-08-31 02:15:12 +401,Richard,rlarsonb4@wisc.edu,13.48.231.150,1979-04-15 14:08:09 +402,Terry,thuntb5@usa.gov,65.91.103.240,1998-05-15 11:50:49 +403,Harry,hburnsb6@nasa.gov,33.38.21.244,1981-04-12 14:02:20 +404,Diana,dellisb7@mlb.com,218.229.81.135,1997-01-29 00:17:25 +405,Jack,jburkeb8@tripadvisor.com,210.227.182.216,1984-03-09 17:24:03 +406,Julia,jlongb9@fotki.com,10.210.12.104,2005-10-26 03:54:13 +407,Lois,lscottba@msu.edu,188.79.136.138,1973-02-02 18:40:39 +408,Sandra,shendersonbb@shareasale.com,114.171.220.108,2012-06-09 18:22:26 +409,Irene,isanchezbc@cdbaby.com,109.255.50.119,1983-09-28 21:11:27 +410,Emily,ebrooksbd@bandcamp.com,227.81.93.79,1970-08-31 21:08:01 +411,Michelle,mdiazbe@businessweek.com,236.249.6.226,1993-05-22 08:07:07 +412,Tammy,tbennettbf@wisc.edu,145.253.239.152,1978-12-31 20:24:51 +413,Christine,cgreenebg@flickr.com,97.25.140.118,1978-07-17 12:55:30 +414,Patricia,pgarzabh@tuttocitta.it,139.246.192.211,1984-02-27 13:40:08 +415,Kimberly,kromerobi@aol.com,73.56.88.247,1976-09-16 14:22:04 +416,George,gjohnstonbj@fda.gov,240.36.245.185,1979-07-24 14:36:02 +417,Eugene,efullerbk@sciencedaily.com,42.38.105.140,2012-09-12 01:56:41 +418,Andrea,astevensbl@goo.gl,31.152.207.204,1979-05-24 11:06:21 +419,Shirley,sreidbm@scientificamerican.com,103.60.31.241,1984-02-23 04:07:41 +420,Terry,tmorenobn@blinklist.com,92.161.34.42,1994-06-25 14:01:35 +421,Christopher,cmorenobo@go.com,158.86.176.82,1973-09-05 09:18:47 +422,Dennis,dhansonbp@ning.com,40.160.81.75,1982-01-20 10:19:41 +423,Beverly,brussellbq@de.vu,138.32.56.204,1997-11-06 07:20:19 +424,Howard,hparkerbr@163.com,103.171.134.171,2015-06-24 15:37:10 +425,Helen,hmccoybs@fema.gov,61.200.4.71,1995-06-20 08:59:10 +426,Ann,ahudsonbt@cafepress.com,239.187.71.125,1977-04-11 07:59:28 +427,Tina,twestbu@nhs.uk,80.213.117.74,1992-08-19 05:54:44 +428,Terry,tnguyenbv@noaa.gov,21.93.118.95,1991-09-19 23:22:55 +429,Ashley,aburtonbw@wix.com,233.176.205.109,2009-11-10 05:01:20 +430,Eric,emyersbx@1und1.de,168.91.212.67,1987-08-10 07:16:20 +431,Barbara,blittleby@lycos.com,242.14.189.239,2008-08-02 12:13:04 +432,Sean,sevansbz@instagram.com,14.39.177.13,2007-04-16 17:28:49 +433,Shirley,sburtonc0@newsvine.com,34.107.138.76,1980-12-10 02:19:29 +434,Patricia,pfreemanc1@so-net.ne.jp,219.213.142.117,1987-03-01 02:25:45 +435,Paula,pfosterc2@vkontakte.ru,227.14.138.141,1972-09-22 12:59:34 +436,Nicole,nstewartc3@1688.com,8.164.23.115,1998-10-27 00:10:17 +437,Earl,ekimc4@ovh.net,100.26.244.177,2013-01-22 10:05:46 +438,Beverly,breedc5@reuters.com,174.12.226.27,1974-09-22 07:29:36 +439,Lawrence,lbutlerc6@a8.net,105.164.42.164,1992-06-05 00:43:40 +440,Charles,cmoorec7@ucoz.com,252.197.131.69,1990-04-09 02:34:05 +441,Alice,alawsonc8@live.com,183.73.220.232,1989-02-28 09:11:04 +442,Dorothy,dcarpenterc9@arstechnica.com,241.47.200.14,2005-05-02 19:57:21 +443,Carolyn,cfowlerca@go.com,213.109.55.202,1978-09-10 20:18:20 +444,Anthony,alongcb@free.fr,169.221.158.204,1984-09-13 01:59:23 +445,Annie,amoorecc@e-recht24.de,50.34.148.61,2009-03-26 03:41:07 +446,Carlos,candrewscd@ihg.com,236.69.59.212,1972-03-29 22:42:48 +447,Beverly,bramosce@google.ca,164.250.184.49,1982-11-10 04:34:01 +448,Teresa,tlongcf@umich.edu,174.88.53.223,1987-05-17 12:48:00 +449,Roy,rboydcg@uol.com.br,91.58.243.215,1974-06-16 17:59:54 +450,Ashley,afieldsch@tamu.edu,130.138.11.126,1983-09-15 05:52:36 +451,Judith,jhawkinsci@cmu.edu,200.187.103.245,2003-10-22 12:24:03 +452,Rebecca,rwestcj@ocn.ne.jp,72.85.3.103,1980-11-13 11:01:26 +453,Raymond,rporterck@infoseek.co.jp,146.33.216.151,1982-05-17 23:58:03 +454,Janet,jmarshallcl@odnoklassniki.ru,52.46.193.166,1998-10-04 00:02:21 +455,Shirley,speterscm@salon.com,248.126.31.15,1987-01-30 06:04:59 +456,Annie,abowmancn@economist.com,222.213.248.59,2006-03-14 23:52:59 +457,Jean,jlarsonco@blogspot.com,71.41.25.195,2007-09-08 23:49:45 +458,Phillip,pmoralescp@stanford.edu,74.119.87.28,2011-03-14 20:25:40 +459,Norma,nrobinsoncq@economist.com,28.225.21.54,1989-10-21 01:22:43 +460,Kimberly,kclarkcr@dion.ne.jp,149.171.132.153,2008-06-27 02:27:30 +461,Ruby,rmorriscs@ucla.edu,177.85.163.249,2016-01-28 16:43:44 +462,Jonathan,jcastilloct@tripod.com,78.4.28.77,2000-05-24 17:33:06 +463,Edward,ebryantcu@jigsy.com,140.31.98.193,1992-12-17 08:32:47 +464,Chris,chamiltoncv@eepurl.com,195.171.234.206,1970-12-05 03:42:19 +465,Michael,mweavercw@reference.com,7.233.133.213,1987-03-29 02:30:54 +466,Howard,hlawrencecx@businessweek.com,113.225.124.224,1990-07-30 07:20:57 +467,Philip,phowardcy@comsenz.com,159.170.247.249,2010-10-15 10:18:37 +468,Mary,mmarshallcz@xing.com,125.132.189.70,2007-07-19 13:48:47 +469,Scott,salvarezd0@theguardian.com,78.49.103.230,1987-10-31 06:10:44 +470,Wayne,wcarrolld1@blog.com,238.1.120.204,1980-11-19 03:26:10 +471,Jennifer,jwoodsd2@multiply.com,92.20.224.49,2010-05-06 22:17:04 +472,Raymond,rwelchd3@toplist.cz,176.158.35.240,2007-12-12 19:02:51 +473,Steven,sdixond4@wisc.edu,167.55.237.52,1984-05-05 11:44:37 +474,Ralph,rjamesd5@ameblo.jp,241.190.50.133,2000-07-06 08:44:37 +475,Jason,jrobinsond6@hexun.com,138.119.139.56,2006-02-03 05:27:45 +476,Doris,dwoodd7@fema.gov,180.220.156.190,1978-05-11 20:14:20 +477,Elizabeth,eberryd8@youtu.be,74.188.53.229,2006-11-18 08:29:06 +478,Irene,igilbertd9@privacy.gov.au,194.152.218.1,1985-09-17 02:46:52 +479,Jessica,jdeanda@ameblo.jp,178.103.93.118,1974-06-07 19:04:05 +480,Rachel,ralvarezdb@phoca.cz,17.22.223.174,1999-03-08 02:43:25 +481,Kenneth,kthompsondc@shinystat.com,229.119.91.234,2007-05-15 13:17:32 +482,Harold,hmurraydd@parallels.com,133.26.188.80,1993-11-15 03:42:07 +483,Paula,phowellde@samsung.com,34.215.28.216,1993-11-29 15:55:00 +484,Ruth,rpiercedf@tripadvisor.com,111.30.130.123,1986-08-17 10:19:38 +485,Phyllis,paustindg@vk.com,50.84.34.178,1994-04-13 03:05:24 +486,Laura,lfosterdh@usnews.com,37.8.101.33,2001-06-30 08:58:59 +487,Eric,etaylordi@com.com,103.183.253.45,2006-09-15 20:18:46 +488,Doris,driveradj@prweb.com,247.16.2.199,1989-05-08 09:27:09 +489,Ryan,rhughesdk@elegantthemes.com,103.234.153.232,1989-08-01 18:36:06 +490,Steve,smoralesdl@jigsy.com,3.76.84.207,2011-03-13 17:01:05 +491,Louis,lsullivandm@who.int,78.135.44.208,1975-11-26 16:01:23 +492,Catherine,ctuckerdn@seattletimes.com,93.137.106.21,1990-03-13 16:14:56 +493,Ann,adixondo@gmpg.org,191.136.222.111,2002-06-05 14:22:18 +494,Johnny,jhartdp@amazon.com,103.252.198.39,1988-07-30 23:54:49 +495,Susan,srichardsdq@skype.com,126.247.192.11,2005-01-09 12:08:14 +496,Brenda,bparkerdr@skype.com,63.232.216.86,1974-05-18 05:58:29 +497,Tammy,tmurphyds@constantcontact.com,56.56.37.112,2014-08-05 18:22:25 +498,Larry,lhayesdt@wordpress.com,162.146.13.46,1997-02-26 14:01:53 +499,,ethomasdu@hhs.gov,6.241.88.250,2007-09-14 13:03:34 +500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20 diff --git a/tests/functional/shared_tests/test_aliases.py b/tests/functional/shared_tests/test_aliases.py new file mode 100644 index 000000000..c053f9dd0 --- /dev/null +++ b/tests/functional/shared_tests/test_aliases.py @@ -0,0 +1,22 @@ +from dbt.tests.adapter.aliases.test_aliases import ( + BaseAliases, + BaseAliasErrors, + BaseSameAliasDifferentSchemas, + BaseSameAliasDifferentDatabases, +) + + +class TestAliases(BaseAliases): + pass + + +class TestAliasErrors(BaseAliasErrors): + pass + + +class TestSameAliasDifferentSchemas(BaseSameAliasDifferentSchemas): + pass + + +class TestSameAliasDifferentDatabases(BaseSameAliasDifferentDatabases): + pass diff --git a/tests/functional/shared_tests/test_basic.py b/tests/functional/shared_tests/test_basic.py new file mode 100644 index 000000000..bf31a1232 --- /dev/null +++ b/tests/functional/shared_tests/test_basic.py @@ -0,0 +1,79 @@ +from dbt.tests.adapter.basic.test_adapter_methods import BaseAdapterMethod +from dbt.tests.adapter.basic.test_base import BaseSimpleMaterializations +from dbt.tests.adapter.basic.test_docs_generate import ( + BaseDocsGenerate, + BaseDocsGenReferences, +) +from dbt.tests.adapter.basic.test_empty import BaseEmpty +from dbt.tests.adapter.basic.test_ephemeral import BaseEphemeral +from dbt.tests.adapter.basic.test_generic_tests import BaseGenericTests +from dbt.tests.adapter.basic.test_incremental import ( + BaseIncremental, + BaseIncrementalNotSchemaChange, +) +from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests +from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral +from dbt.tests.adapter.basic.test_snapshot_check_cols import BaseSnapshotCheckCols +from dbt.tests.adapter.basic.test_snapshot_timestamp import BaseSnapshotTimestamp +from dbt.tests.adapter.basic.test_table_materialization import BaseTableMaterialization +from dbt.tests.adapter.basic.test_validate_connection import BaseValidateConnection + + +class TestBaseCaching(BaseAdapterMethod): + pass + + +class TestSimpleMaterializations(BaseSimpleMaterializations): + pass + + +class TestDocsGenerate(BaseDocsGenerate): + pass + + +class TestDocsGenReferences(BaseDocsGenReferences): + pass + + +class TestEmpty(BaseEmpty): + pass + + +class TestEphemeral(BaseEphemeral): + pass + + +class TestGenericTests(BaseGenericTests): + pass + + +class TestIncremental(BaseIncremental): + pass + + +class TestBaseIncrementalNotSchemaChange(BaseIncrementalNotSchemaChange): + pass + + +class TestSingularTests(BaseSingularTests): + pass + + +class TestSingularTestsEphemeral(BaseSingularTestsEphemeral): + pass + + +class TestSnapshotCheckCols(BaseSnapshotCheckCols): + pass + + +class TestSnapshotTimestamp(BaseSnapshotTimestamp): + pass + + +class TestTableMat(BaseTableMaterialization): + pass + + +class TestValidateConnection(BaseValidateConnection): + pass diff --git a/tests/functional/shared_tests/test_caching.py b/tests/functional/shared_tests/test_caching.py new file mode 100644 index 000000000..071e4e1a1 --- /dev/null +++ b/tests/functional/shared_tests/test_caching.py @@ -0,0 +1,22 @@ +from dbt.tests.adapter.caching.test_caching import ( + BaseCachingLowercaseModel, + BaseCachingSelectedSchemaOnly, + BaseCachingUppercaseModel, + BaseNoPopulateCache, +) + + +class TestCachingLowerCaseModel(BaseCachingLowercaseModel): + pass + + +class TestCachingUppercaseModel(BaseCachingUppercaseModel): + pass + + +class TestCachingSelectedSchemaOnly(BaseCachingSelectedSchemaOnly): + pass + + +class TestNoPopulateCache(BaseNoPopulateCache): + pass diff --git a/tests/functional/shared_tests/test_clone.py b/tests/functional/shared_tests/test_clone.py new file mode 100644 index 000000000..862ebce62 --- /dev/null +++ b/tests/functional/shared_tests/test_clone.py @@ -0,0 +1,20 @@ +import pytest + +from dbt.tests.adapter.dbt_clone.test_dbt_clone import ( + BaseCloneNotPossible, + BaseClonePossible, + BaseCloneSameTargetAndState, +) + + +class TestBaseCloneNotPossible(BaseCloneNotPossible): + pass + + +@pytest.mark.skip("Cloning is not possible in Postgres") +class TestBaseClonePossible(BaseClonePossible): + pass + + +class TestCloneSameTargetAndState(BaseCloneSameTargetAndState): + pass diff --git a/tests/functional/shared_tests/test_column_types.py b/tests/functional/shared_tests/test_column_types.py new file mode 100644 index 000000000..f5037860b --- /dev/null +++ b/tests/functional/shared_tests/test_column_types.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes + + +class TestPostgresColumnTypes(BaseColumnTypes): + pass diff --git a/tests/functional/shared_tests/test_concurrency.py b/tests/functional/shared_tests/test_concurrency.py new file mode 100644 index 000000000..f7e4b0e49 --- /dev/null +++ b/tests/functional/shared_tests/test_concurrency.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.concurrency.test_concurrency import BaseConcurrency + + +class TestConcurrency(BaseConcurrency): + pass diff --git a/tests/functional/shared_tests/test_constraints.py b/tests/functional/shared_tests/test_constraints.py new file mode 100644 index 000000000..52e5780de --- /dev/null +++ b/tests/functional/shared_tests/test_constraints.py @@ -0,0 +1,64 @@ +from dbt.tests.adapter.constraints.test_constraints import ( + BaseTableConstraintsColumnsEqual, + BaseViewConstraintsColumnsEqual, + BaseIncrementalConstraintsColumnsEqual, + BaseConstraintsRuntimeDdlEnforcement, + BaseConstraintsRollback, + BaseIncrementalConstraintsRuntimeDdlEnforcement, + BaseIncrementalConstraintsRollback, + BaseTableContractSqlHeader, + BaseIncrementalContractSqlHeader, + BaseModelConstraintsRuntimeEnforcement, + BaseConstraintQuotedColumn, + BaseIncrementalForeignKeyConstraint, +) + + +class TestTableConstraintsColumnsEqual(BaseTableConstraintsColumnsEqual): + pass + + +class TestViewConstraintsColumnsEqual(BaseViewConstraintsColumnsEqual): + pass + + +class TestIncrementalConstraintsColumnsEqual(BaseIncrementalConstraintsColumnsEqual): + pass + + +class TestTableConstraintsRuntimeDdlEnforcement(BaseConstraintsRuntimeDdlEnforcement): + pass + + +class TestTableConstraintsRollback(BaseConstraintsRollback): + pass + + +class TestIncrementalConstraintsRuntimeDdlEnforcement( + BaseIncrementalConstraintsRuntimeDdlEnforcement +): + pass + + +class TestIncrementalConstraintsRollback(BaseIncrementalConstraintsRollback): + pass + + +class TestTableContractSqlHeader(BaseTableContractSqlHeader): + pass + + +class TestIncrementalContractSqlHeader(BaseIncrementalContractSqlHeader): + pass + + +class TestModelConstraintsRuntimeEnforcement(BaseModelConstraintsRuntimeEnforcement): + pass + + +class TestConstraintQuotedColumn(BaseConstraintQuotedColumn): + pass + + +class TestIncrementalForeignKeyConstraint(BaseIncrementalForeignKeyConstraint): + pass diff --git a/tests/functional/shared_tests/test_data_types.py b/tests/functional/shared_tests/test_data_types.py new file mode 100644 index 000000000..984e32d82 --- /dev/null +++ b/tests/functional/shared_tests/test_data_types.py @@ -0,0 +1,35 @@ +from dbt.tests.adapter.utils.data_types.test_type_bigint import BaseTypeBigInt +from dbt.tests.adapter.utils.data_types.test_type_boolean import BaseTypeBoolean +from dbt.tests.adapter.utils.data_types.test_type_float import BaseTypeFloat +from dbt.tests.adapter.utils.data_types.test_type_int import BaseTypeInt +from dbt.tests.adapter.utils.data_types.test_type_numeric import BaseTypeNumeric +from dbt.tests.adapter.utils.data_types.test_type_string import BaseTypeString +from dbt.tests.adapter.utils.data_types.test_type_timestamp import BaseTypeTimestamp + + +class TestTypeBigInt(BaseTypeBigInt): + pass + + +class TestTypeBoolean(BaseTypeBoolean): + pass + + +class TestTypeFloat(BaseTypeFloat): + pass + + +class TestTypeInt(BaseTypeInt): + pass + + +class TestTypeNumeric(BaseTypeNumeric): + pass + + +class TestTypeString(BaseTypeString): + pass + + +class TestTypeTimestamp(BaseTypeTimestamp): + pass diff --git a/tests/functional/shared_tests/test_debug.py b/tests/functional/shared_tests/test_debug.py new file mode 100644 index 000000000..6372bc590 --- /dev/null +++ b/tests/functional/shared_tests/test_debug.py @@ -0,0 +1,12 @@ +from dbt.tests.adapter.dbt_debug.test_dbt_debug import ( + BaseDebugPostgres, + BaseDebugInvalidProjectPostgres, +) + + +class TestDebugPostgres(BaseDebugPostgres): + pass + + +class TestDebugInvalidProjectPostgres(BaseDebugInvalidProjectPostgres): + pass diff --git a/tests/functional/shared_tests/test_empty.py b/tests/functional/shared_tests/test_empty.py new file mode 100644 index 000000000..040e38154 --- /dev/null +++ b/tests/functional/shared_tests/test_empty.py @@ -0,0 +1,5 @@ +from dbt.tests.adapter.empty.test_empty import BaseTestEmpty + + +class TestEmpty(BaseTestEmpty): + pass diff --git a/tests/functional/shared_tests/test_ephemeral.py b/tests/functional/shared_tests/test_ephemeral.py new file mode 100644 index 000000000..220ec4203 --- /dev/null +++ b/tests/functional/shared_tests/test_ephemeral.py @@ -0,0 +1,17 @@ +from dbt.tests.adapter.ephemeral.test_ephemeral import ( + BaseEphemeralMulti, + BaseEphemeralNested, + BaseEphemeralErrorHandling, +) + + +class TestEphemeralMulti(BaseEphemeralMulti): + pass + + +class TestEphemeralNested(BaseEphemeralNested): + pass + + +class TestEphemeralErrorHandling(BaseEphemeralErrorHandling): + pass diff --git a/tests/functional/shared_tests/test_grants.py b/tests/functional/shared_tests/test_grants.py new file mode 100644 index 000000000..d91cd8ee6 --- /dev/null +++ b/tests/functional/shared_tests/test_grants.py @@ -0,0 +1,25 @@ +from dbt.tests.adapter.grants.test_incremental_grants import BaseIncrementalGrants +from dbt.tests.adapter.grants.test_invalid_grants import BaseInvalidGrants +from dbt.tests.adapter.grants.test_model_grants import BaseModelGrants +from dbt.tests.adapter.grants.test_seed_grants import BaseSeedGrants +from dbt.tests.adapter.grants.test_snapshot_grants import BaseSnapshotGrants + + +class TestIncrementalGrants(BaseIncrementalGrants): + pass + + +class TestInvalidGrants(BaseInvalidGrants): + pass + + +class TestModelGrants(BaseModelGrants): + pass + + +class TestSeedGrants(BaseSeedGrants): + pass + + +class TestSnapshotGrants(BaseSnapshotGrants): + pass diff --git a/tests/functional/shared_tests/test_hooks.py b/tests/functional/shared_tests/test_hooks.py new file mode 100644 index 000000000..3db138eb7 --- /dev/null +++ b/tests/functional/shared_tests/test_hooks.py @@ -0,0 +1,76 @@ +from dbt.tests.adapter.hooks.test_model_hooks import ( + BasePrePostModelHooks, + BaseHookRefs, + BasePrePostModelHooksOnSeeds, + BaseHooksRefsOnSeeds, + BasePrePostModelHooksOnSeedsPlusPrefixed, + BasePrePostModelHooksOnSeedsPlusPrefixedWhitespace, + BasePrePostModelHooksOnSnapshots, + BasePrePostModelHooksInConfig, + BasePrePostModelHooksInConfigWithCount, + BasePrePostModelHooksInConfigKwargs, + BasePrePostSnapshotHooksInConfigKwargs, + BaseDuplicateHooksInConfigs, +) +from dbt.tests.adapter.hooks.test_run_hooks import ( + BasePrePostRunHooks, + BaseAfterRunHooks, +) + + +class TestPrePostModelHooks(BasePrePostModelHooks): + pass + + +class TestHookRefs(BaseHookRefs): + pass + + +class TestPrePostModelHooksOnSeeds(BasePrePostModelHooksOnSeeds): + pass + + +class TestHooksRefsOnSeeds(BaseHooksRefsOnSeeds): + pass + + +class TestPrePostModelHooksOnSeedsPlusPrefixed(BasePrePostModelHooksOnSeedsPlusPrefixed): + pass + + +class TestPrePostModelHooksOnSeedsPlusPrefixedWhitespace( + BasePrePostModelHooksOnSeedsPlusPrefixedWhitespace +): + pass + + +class TestPrePostModelHooksOnSnapshots(BasePrePostModelHooksOnSnapshots): + pass + + +class TestPrePostModelHooksInConfig(BasePrePostModelHooksInConfig): + pass + + +class TestPrePostModelHooksInConfigWithCount(BasePrePostModelHooksInConfigWithCount): + pass + + +class TestPrePostModelHooksInConfigKwargs(BasePrePostModelHooksInConfigKwargs): + pass + + +class TestPrePostSnapshotHooksInConfigKwargs(BasePrePostSnapshotHooksInConfigKwargs): + pass + + +class TestDuplicateHooksInConfigs(BaseDuplicateHooksInConfigs): + pass + + +class TestPrePostRunHooks(BasePrePostRunHooks): + pass + + +class TestAfterRunHooks(BaseAfterRunHooks): + pass diff --git a/tests/functional/shared_tests/test_incremental.py b/tests/functional/shared_tests/test_incremental.py new file mode 100644 index 000000000..7782316ad --- /dev/null +++ b/tests/functional/shared_tests/test_incremental.py @@ -0,0 +1,24 @@ +from dbt.tests.adapter.incremental.test_incremental_merge_exclude_columns import ( + BaseMergeExcludeColumns, +) +from dbt.tests.adapter.incremental.test_incremental_on_schema_change import ( + BaseIncrementalOnSchemaChange, +) +from dbt.tests.adapter.incremental.test_incremental_predicates import BaseIncrementalPredicates +from dbt.tests.adapter.incremental.test_incremental_unique_id import BaseIncrementalUniqueKey + + +class TestBaseMergeExcludeColumns(BaseMergeExcludeColumns): + pass + + +class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange): + pass + + +class TestIncrementalPredicatesDeleteInsert(BaseIncrementalPredicates): + pass + + +class TestIncrementalUniqueKey(BaseIncrementalUniqueKey): + pass diff --git a/tests/functional/shared_tests/test_persist_docs.py b/tests/functional/shared_tests/test_persist_docs.py new file mode 100644 index 000000000..2653ca4ae --- /dev/null +++ b/tests/functional/shared_tests/test_persist_docs.py @@ -0,0 +1,17 @@ +from dbt.tests.adapter.persist_docs.test_persist_docs import ( + BasePersistDocs, + BasePersistDocsColumnMissing, + BasePersistDocsCommentOnQuotedColumn, +) + + +class TestPersistDocs(BasePersistDocs): + pass + + +class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing): + pass + + +class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn): + pass diff --git a/tests/functional/shared_tests/test_query_comment.py b/tests/functional/shared_tests/test_query_comment.py new file mode 100644 index 000000000..30399b249 --- /dev/null +++ b/tests/functional/shared_tests/test_query_comment.py @@ -0,0 +1,32 @@ +from dbt.tests.adapter.query_comment.test_query_comment import ( + BaseQueryComments, + BaseMacroQueryComments, + BaseMacroArgsQueryComments, + BaseMacroInvalidQueryComments, + BaseNullQueryComments, + BaseEmptyQueryComments, +) + + +class TestQueryComments(BaseQueryComments): + pass + + +class TestMacroQueryComments(BaseMacroQueryComments): + pass + + +class TestMacroArgsQueryComments(BaseMacroArgsQueryComments): + pass + + +class TestMacroInvalidQueryComments(BaseMacroInvalidQueryComments): + pass + + +class TestNullQueryComments(BaseNullQueryComments): + pass + + +class TestEmptyQueryComments(BaseEmptyQueryComments): + pass diff --git a/tests/functional/shared_tests/test_relations.py b/tests/functional/shared_tests/test_relations.py new file mode 100644 index 000000000..213eff507 --- /dev/null +++ b/tests/functional/shared_tests/test_relations.py @@ -0,0 +1,10 @@ +from dbt.tests.adapter.relations.test_changing_relation_type import BaseChangeRelationTypeValidator +from dbt.tests.adapter.relations.test_dropping_schema_named import BaseDropSchemaNamed + + +class TestChangeRelationTypes(BaseChangeRelationTypeValidator): + pass + + +class TestDropSchemaNamed(BaseDropSchemaNamed): + pass diff --git a/tests/functional/shared_tests/test_show.py b/tests/functional/shared_tests/test_show.py new file mode 100644 index 000000000..47974a04c --- /dev/null +++ b/tests/functional/shared_tests/test_show.py @@ -0,0 +1,12 @@ +from dbt.tests.adapter.dbt_show.test_dbt_show import ( + BaseShowLimit, + BaseShowSqlHeader, +) + + +class TestPostgresShowSqlHeader(BaseShowSqlHeader): + pass + + +class TestPostgresShowLimit(BaseShowLimit): + pass diff --git a/tests/functional/shared_tests/test_simple_copy.py b/tests/functional/shared_tests/test_simple_copy.py new file mode 100644 index 000000000..ad68f1633 --- /dev/null +++ b/tests/functional/shared_tests/test_simple_copy.py @@ -0,0 +1,17 @@ +from dbt.tests.adapter.simple_copy.test_copy_uppercase import BaseSimpleCopyUppercase +from dbt.tests.adapter.simple_copy.test_simple_copy import ( + SimpleCopyBase, + EmptyModelsArentRunBase, +) + + +class TestSimpleCopyUppercase(BaseSimpleCopyUppercase): + pass + + +class TestSimpleCopyBase(SimpleCopyBase): + pass + + +class TestEmptyModelsArentRun(EmptyModelsArentRunBase): + pass diff --git a/tests/functional/shared_tests/test_simple_seed.py b/tests/functional/shared_tests/test_simple_seed.py new file mode 100644 index 000000000..97b8870a1 --- /dev/null +++ b/tests/functional/shared_tests/test_simple_seed.py @@ -0,0 +1,69 @@ +from dbt.tests.adapter.simple_seed.test_seed import ( + BaseBasicSeedTests, + BaseSeedConfigFullRefreshOn, + BaseSeedConfigFullRefreshOff, + BaseSeedCustomSchema, + BaseSeedWithUniqueDelimiter, + BaseSeedWithWrongDelimiter, + BaseSeedWithEmptyDelimiter, + BaseSimpleSeedEnabledViaConfig, + BaseSeedParsing, + BaseSimpleSeedWithBOM, + BaseSeedSpecificFormats, + BaseTestEmptySeed, +) +from dbt.tests.adapter.simple_seed.test_seed_type_override import ( + BaseSimpleSeedColumnOverride, +) + + +class TestBasicSeedTests(BaseBasicSeedTests): + pass + + +class TestSeedConfigFullRefreshOn(BaseSeedConfigFullRefreshOn): + pass + + +class TestSeedConfigFullRefreshOff(BaseSeedConfigFullRefreshOff): + pass + + +class TestSeedCustomSchema(BaseSeedCustomSchema): + pass + + +class TestSeedWithUniqueDelimiter(BaseSeedWithUniqueDelimiter): + pass + + +class TestSeedWithWrongDelimiter(BaseSeedWithWrongDelimiter): + pass + + +class TestSeedWithEmptyDelimiter(BaseSeedWithEmptyDelimiter): + pass + + +class TestSimpleSeedEnabledViaConfig(BaseSimpleSeedEnabledViaConfig): + pass + + +class TestSeedParsing(BaseSeedParsing): + pass + + +class TestSimpleSeedWithBOM(BaseSimpleSeedWithBOM): + pass + + +class TestSeedSpecificFormats(BaseSeedSpecificFormats): + pass + + +class TestEmptySeed(BaseTestEmptySeed): + pass + + +class TestSimpleSeedColumnOverride(BaseSimpleSeedColumnOverride): + pass diff --git a/tests/functional/shared_tests/test_simple_snapshot.py b/tests/functional/shared_tests/test_simple_snapshot.py new file mode 100644 index 000000000..879c7b9c7 --- /dev/null +++ b/tests/functional/shared_tests/test_simple_snapshot.py @@ -0,0 +1,12 @@ +from dbt.tests.adapter.simple_snapshot.test_snapshot import ( + BaseSimpleSnapshot, + BaseSnapshotCheck, +) + + +class TestSnapshot(BaseSimpleSnapshot): + pass + + +class TestSnapshotCheck(BaseSnapshotCheck): + pass diff --git a/tests/functional/shared_tests/test_store_test_failures.py b/tests/functional/shared_tests/test_store_test_failures.py new file mode 100644 index 000000000..9d7ae39e2 --- /dev/null +++ b/tests/functional/shared_tests/test_store_test_failures.py @@ -0,0 +1,7 @@ +from dbt.tests.adapter.store_test_failures_tests.test_store_test_failures import ( + BaseStoreTestFailures, +) + + +class TestStoreTestFailures(BaseStoreTestFailures): + pass diff --git a/tests/functional/shared_tests/test_unit_testing.py b/tests/functional/shared_tests/test_unit_testing.py new file mode 100644 index 000000000..357da877b --- /dev/null +++ b/tests/functional/shared_tests/test_unit_testing.py @@ -0,0 +1,15 @@ +from dbt.tests.adapter.unit_testing.test_case_insensitivity import BaseUnitTestCaseInsensivity +from dbt.tests.adapter.unit_testing.test_invalid_input import BaseUnitTestInvalidInput +from dbt.tests.adapter.unit_testing.test_types import BaseUnitTestingTypes + + +class TestPostgresUnitTestCaseInsensitivity(BaseUnitTestCaseInsensivity): + pass + + +class TestPostgresUnitTestInvalidInput(BaseUnitTestInvalidInput): + pass + + +class TestPostgresUnitTestingTypes(BaseUnitTestingTypes): + pass diff --git a/tests/functional/shared_tests/test_utils.py b/tests/functional/shared_tests/test_utils.py new file mode 100644 index 000000000..9934a240a --- /dev/null +++ b/tests/functional/shared_tests/test_utils.py @@ -0,0 +1,174 @@ +import pytest + +from dbt.tests.adapter.utils.test_any_value import BaseAnyValue +from dbt.tests.adapter.utils.test_array_append import BaseArrayAppend +from dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat +from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct +from dbt.tests.adapter.utils.test_bool_or import BaseBoolOr +from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText +from dbt.tests.adapter.utils.test_concat import BaseConcat +from dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampAware +from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd +from dbt.tests.adapter.utils.test_datediff import BaseDateDiff +from dbt.tests.adapter.utils.test_date_spine import BaseDateSpine +from dbt.tests.adapter.utils.test_date_trunc import BaseDateTrunc +from dbt.tests.adapter.utils.test_equals import BaseEquals +from dbt.tests.adapter.utils.test_escape_single_quotes import ( + BaseEscapeSingleQuotesQuote, + BaseEscapeSingleQuotesBackslash, +) +from dbt.tests.adapter.utils.test_except import BaseExcept +from dbt.tests.adapter.utils.test_generate_series import BaseGenerateSeries +from dbt.tests.adapter.utils.test_get_intervals_between import BaseGetIntervalsBetween +from dbt.tests.adapter.utils.test_get_powers_of_two import BaseGetPowersOfTwo +from dbt.tests.adapter.utils.test_hash import BaseHash +from dbt.tests.adapter.utils.test_intersect import BaseIntersect +from dbt.tests.adapter.utils.test_last_day import BaseLastDay +from dbt.tests.adapter.utils.test_length import BaseLength +from dbt.tests.adapter.utils.test_listagg import BaseListagg +from dbt.tests.adapter.utils.test_null_compare import BaseNullCompare, BaseMixedNullCompare +from dbt.tests.adapter.utils.test_position import BasePosition +from dbt.tests.adapter.utils.test_replace import BaseReplace +from dbt.tests.adapter.utils.test_right import BaseRight +from dbt.tests.adapter.utils.test_safe_cast import BaseSafeCast +from dbt.tests.adapter.utils.test_split_part import BaseSplitPart +from dbt.tests.adapter.utils.test_string_literal import BaseStringLiteral +from dbt.tests.adapter.utils.test_timestamps import BaseCurrentTimestamps +from dbt.tests.adapter.utils.test_validate_sql import BaseValidateSqlMethod + + +class TestAnyValue(BaseAnyValue): + pass + + +class TestArrayAppend(BaseArrayAppend): + pass + + +class TestArrayConcat(BaseArrayConcat): + pass + + +class TestArrayConstruct(BaseArrayConstruct): + pass + + +class TestBoolOr(BaseBoolOr): + pass + + +class TestCastBoolToText(BaseCastBoolToText): + pass + + +class TestConcat(BaseConcat): + pass + + +class TestCurrentTimestamp(BaseCurrentTimestampAware): + pass + + +class TestDateSpine(BaseDateSpine): + pass + + +class TestDateTrunc(BaseDateTrunc): + pass + + +class TestDateAdd(BaseDateAdd): + pass + + +class TestDateDiff(BaseDateDiff): + pass + + +class TestEquals(BaseEquals): + pass + + +class TestEscapeSingleQuotesQuote(BaseEscapeSingleQuotesQuote): + pass + + +@pytest.mark.skip("Not implemented in `dbt-postgres<1.8`, fails in `dbt-postgres>=1.8`") +class TestEscapeSingleQuotesBackslash(BaseEscapeSingleQuotesBackslash): + pass + + +class TestExcept(BaseExcept): + pass + + +class TestGenerateSeries(BaseGenerateSeries): + pass + + +class TestGetIntervalsBetween(BaseGetIntervalsBetween): + pass + + +class TestGetPowersOfTwo(BaseGetPowersOfTwo): + pass + + +class TestHash(BaseHash): + pass + + +class TestIntersect(BaseIntersect): + pass + + +class TestLastDay(BaseLastDay): + pass + + +class TestLength(BaseLength): + pass + + +class TestListagg(BaseListagg): + pass + + +class TestMixedNullCompare(BaseMixedNullCompare): + pass + + +class TestNullCompare(BaseNullCompare): + pass + + +class TestPosition(BasePosition): + pass + + +class TestReplace(BaseReplace): + pass + + +class TestRight(BaseRight): + pass + + +class TestSafeCast(BaseSafeCast): + pass + + +class TestSplitPart(BaseSplitPart): + pass + + +class TestStringLiteral(BaseStringLiteral): + pass + + +class TestCurrentTimestamps(BaseCurrentTimestamps): + pass + + +class TestValidateSqlMethod(BaseValidateSqlMethod): + pass From 809704c152c546a3c224e3780ddb35980f11347f Mon Sep 17 00:00:00 2001 From: Michelle Ark <MichelleArk@users.noreply.github.com> Date: Tue, 27 Feb 2024 15:37:52 -0500 Subject: [PATCH 035/114] Bump dbt-common<2.0 (#25) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6b58ab143..6b67b91aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ "dbt-adapters>=0.1.0a6,<0.2.0", "psycopg2>=2.9,<3.0", # installed via dbt-adapters but used directly - "dbt-common<1.0", + "dbt-common<2.0", "agate>=1.0,<2.0", ] [project.urls] From 71102f8a0d5539206e8f4938a45659ce0e0eda0f Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 27 Feb 2024 17:32:49 -0500 Subject: [PATCH 036/114] Remove saved query tests, they belong in dbt-core (#26) This PR only removed test cases. Failed tests are resolved via https://github.com/dbt-labs/dbt-postgres/pull/21. --- tests/functional/saved_queries/fixtures.py | 93 --------- .../saved_queries/test_saved_query_build.py | 41 ---- .../saved_queries/test_saved_query_configs.py | 186 ------------------ .../saved_queries/test_saved_query_parsing.py | 113 ----------- 4 files changed, 433 deletions(-) delete mode 100644 tests/functional/saved_queries/fixtures.py delete mode 100644 tests/functional/saved_queries/test_saved_query_build.py delete mode 100644 tests/functional/saved_queries/test_saved_query_configs.py delete mode 100644 tests/functional/saved_queries/test_saved_query_parsing.py diff --git a/tests/functional/saved_queries/fixtures.py b/tests/functional/saved_queries/fixtures.py deleted file mode 100644 index 68565d82e..000000000 --- a/tests/functional/saved_queries/fixtures.py +++ /dev/null @@ -1,93 +0,0 @@ -saved_query_description = """ -{% docs saved_query_description %} My SavedQuery Description {% enddocs %} -""" - -saved_queries_yml = """ -version: 2 - -saved_queries: - - name: test_saved_query - description: "{{ doc('saved_query_description') }}" - label: Test Saved Query - query_params: - metrics: - - simple_metric - group_by: - - "Dimension('user__ds')" - where: - - "{{ Dimension('user__ds', 'DAY') }} <= now()" - - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" - exports: - - name: my_export - config: - alias: my_export_alias - export_as: table - schema: my_export_schema_name -""" - -saved_query_with_extra_config_attributes_yml = """ -version: 2 - -saved_queries: - - name: test_saved_query - description: "{{ doc('saved_query_description') }}" - label: Test Saved Query - query_params: - metrics: - - simple_metric - group_by: - - "Dimension('user__ds')" - where: - - "{{ Dimension('user__ds', 'DAY') }} <= now()" - - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" - exports: - - name: my_export - config: - my_random_config: 'I have this for some reason' - export_as: table -""" - -saved_query_with_export_configs_defined_at_saved_query_level_yml = """ -version: 2 - -saved_queries: - - name: test_saved_query - description: "{{ doc('saved_query_description') }}" - label: Test Saved Query - config: - export_as: table - schema: my_default_export_schema - query_params: - metrics: - - simple_metric - group_by: - - "Dimension('user__ds')" - where: - - "{{ Dimension('user__ds', 'DAY') }} <= now()" - - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" - exports: - - name: my_export - config: - export_as: view - schema: my_custom_export_schema - - name: my_export2 -""" - -saved_query_without_export_configs_defined_yml = """ -version: 2 - -saved_queries: - - name: test_saved_query - description: "{{ doc('saved_query_description') }}" - label: Test Saved Query - query_params: - metrics: - - simple_metric - group_by: - - "Dimension('user__ds')" - where: - - "{{ Dimension('user__ds', 'DAY') }} <= now()" - - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" - exports: - - name: my_export -""" diff --git a/tests/functional/saved_queries/test_saved_query_build.py b/tests/functional/saved_queries/test_saved_query_build.py deleted file mode 100644 index 19787e71a..000000000 --- a/tests/functional/saved_queries/test_saved_query_build.py +++ /dev/null @@ -1,41 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.saved_queries.fixtures import ( - saved_queries_yml, - saved_query_description, -) -from tests.functional.semantic_models.fixtures import ( - fct_revenue_sql, - metricflow_time_spine_sql, - schema_yml, -) - - -class TestSavedQueryBuildNoOp: - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_queries_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - @pytest.fixture(scope="class") - def packages(self): - return """ -packages: - - package: dbt-labs/dbt_utils - version: 1.1.1 -""" - - def test_semantic_model_parsing(self, project): - run_dbt(["deps"]) - result = run_dbt(["build"]) - assert len(result.results) == 2 - assert "test_saved_query" not in [r.node.name for r in result.results] - result = run_dbt(["build", "--include-saved-query"]) - assert len(result.results) == 3 - assert "test_saved_query" in [r.node.name for r in result.results] diff --git a/tests/functional/saved_queries/test_saved_query_configs.py b/tests/functional/saved_queries/test_saved_query_configs.py deleted file mode 100644 index 685c54c2b..000000000 --- a/tests/functional/saved_queries/test_saved_query_configs.py +++ /dev/null @@ -1,186 +0,0 @@ -from dbt.contracts.graph.manifest import Manifest -from dbt.tests.util import update_config_file -from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType -import pytest - -from tests.functional.configs.fixtures import BaseConfigProject -from tests.functional.dbt_runner import dbtTestRunner -from tests.functional.saved_queries.fixtures import ( - saved_queries_yml, - saved_query_description, - saved_query_with_export_configs_defined_at_saved_query_level_yml, - saved_query_with_extra_config_attributes_yml, - saved_query_without_export_configs_defined_yml, -) -from tests.functional.semantic_models.fixtures import ( - fct_revenue_sql, - metricflow_time_spine_sql, - schema_yml, -) - - -class TestSavedQueryConfigs(BaseConfigProject): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "saved-queries": { - "test": { - "test_saved_query": { - "+enabled": True, - "+export_as": ExportDestinationType.VIEW.value, - "+schema": "my_default_export_schema", - } - }, - }, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_query_with_extra_config_attributes_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - def test_basic_saved_query_config( - self, - project, - ): - runner = dbtTestRunner() - - # parse with default fixture project config - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - assert len(result.result.saved_queries) == 1 - saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] - assert saved_query.config.export_as == ExportDestinationType.VIEW - assert saved_query.config.schema == "my_default_export_schema" - - # disable the saved_query via project config and rerun - config_patch = {"saved-queries": {"test": {"test_saved_query": {"+enabled": False}}}} - update_config_file(config_patch, project.project_root, "dbt_project.yml") - result = runner.invoke(["parse"]) - assert result.success - assert len(result.result.saved_queries) == 0 - - -class TestExportConfigsWithAdditionalProperties(BaseConfigProject): - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_queries_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - def test_extra_config_properties_dont_break_parsing(self, project): - runner = dbtTestRunner() - - # parse with default fixture project config - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - assert len(result.result.saved_queries) == 1 - saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] - assert len(saved_query.exports) == 1 - assert saved_query.exports[0].config.__dict__.get("my_random_config") is None - - -class TestInheritingExportConfigFromSavedQueryConfig(BaseConfigProject): - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_query_with_export_configs_defined_at_saved_query_level_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - def test_export_config_inherits_from_saved_query(self, project): - runner = dbtTestRunner() - - # parse with default fixture project config - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - assert len(result.result.saved_queries) == 1 - saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] - assert len(saved_query.exports) == 2 - - # assert Export `my_export` has its configs defined from itself because they should take priority - export1 = next( - (export for export in saved_query.exports if export.name == "my_export"), None - ) - assert export1 is not None - assert export1.config.export_as == ExportDestinationType.VIEW - assert export1.config.export_as != saved_query.config.export_as - assert export1.config.schema_name == "my_custom_export_schema" - assert export1.config.schema_name != saved_query.config.schema - - # assert Export `my_export` has its configs defined from the saved_query because they should take priority - export2 = next( - (export for export in saved_query.exports if export.name == "my_export2"), None - ) - assert export2 is not None - assert export2.config.export_as == ExportDestinationType.TABLE - assert export2.config.export_as == saved_query.config.export_as - assert export2.config.schema_name == "my_default_export_schema" - assert export2.config.schema_name == saved_query.config.schema - - -class TestInheritingExportConfigsFromProject(BaseConfigProject): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "saved-queries": { - "test": { - "test_saved_query": { - "+export_as": ExportDestinationType.VIEW.value, - } - }, - }, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_query_without_export_configs_defined_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - def test_export_config_inherits_from_project( - self, - project, - ): - runner = dbtTestRunner() - - # parse with default fixture project config - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - assert len(result.result.saved_queries) == 1 - saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] - assert saved_query.config.export_as == ExportDestinationType.VIEW - - # change export's `export_as` to `TABLE` via project config - config_patch = { - "saved-queries": { - "test": {"test_saved_query": {"+export_as": ExportDestinationType.TABLE.value}} - } - } - update_config_file(config_patch, project.project_root, "dbt_project.yml") - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - assert len(result.result.saved_queries) == 1 - saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] - assert saved_query.config.export_as == ExportDestinationType.TABLE diff --git a/tests/functional/saved_queries/test_saved_query_parsing.py b/tests/functional/saved_queries/test_saved_query_parsing.py deleted file mode 100644 index 73f63f1ea..000000000 --- a/tests/functional/saved_queries/test_saved_query_parsing.py +++ /dev/null @@ -1,113 +0,0 @@ -from typing import List - -from dbt.contracts.graph.manifest import Manifest -from dbt.tests.util import write_file -from dbt_common.events.base_types import BaseEvent -from dbt_semantic_interfaces.type_enums.export_destination_type import ExportDestinationType -import pytest - -from tests.functional.dbt_runner import dbtTestRunner -from tests.functional.saved_queries.fixtures import ( - saved_queries_yml, - saved_query_description, -) -from tests.functional.semantic_models.fixtures import ( - fct_revenue_sql, - metricflow_time_spine_sql, - schema_yml, -) - - -class TestSavedQueryParsing: - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_queries_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - def test_semantic_model_parsing(self, project): - runner = dbtTestRunner() - result = runner.invoke(["parse", "--no-partial-parse"]) - assert result.success - assert isinstance(result.result, Manifest) - manifest = result.result - assert len(manifest.saved_queries) == 1 - saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] - assert saved_query.name == "test_saved_query" - assert len(saved_query.query_params.metrics) == 1 - assert len(saved_query.query_params.group_by) == 1 - assert len(saved_query.query_params.where.where_filters) == 2 - assert len(saved_query.depends_on.nodes) == 1 - assert saved_query.description == "My SavedQuery Description" - assert len(saved_query.exports) == 1 - assert saved_query.exports[0].name == "my_export" - assert saved_query.exports[0].config.alias == "my_export_alias" - assert saved_query.exports[0].config.export_as == ExportDestinationType.TABLE - assert saved_query.exports[0].config.schema_name == "my_export_schema_name" - - def test_saved_query_error(self, project): - error_schema_yml = saved_queries_yml.replace("simple_metric", "metric_not_found") - write_file(error_schema_yml, project.project_root, "models", "saved_queries.yml") - events: List[BaseEvent] = [] - runner = dbtTestRunner(callbacks=[events.append]) - - result = runner.invoke(["parse", "--no-partial-parse"]) - assert not result.success - validation_errors = [e for e in events if e.info.name == "MainEncounteredError"] - assert validation_errors - - -class TestSavedQueryPartialParsing: - @pytest.fixture(scope="class") - def models(self): - return { - "saved_queries.yml": saved_queries_yml, - "schema.yml": schema_yml, - "fct_revenue.sql": fct_revenue_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "docs.md": saved_query_description, - } - - def test_saved_query_metrics_changed(self, project): - # First, use the default saved_queries.yml to define our saved_queries, and - # run the dbt parse command - runner = dbtTestRunner() - result = runner.invoke(["parse"]) - assert result.success - - # Next, modify the default saved_queries.yml to change a detail of the saved - # query. - modified_saved_queries_yml = saved_queries_yml.replace("simple_metric", "txn_revenue") - write_file(modified_saved_queries_yml, project.project_root, "models", "saved_queries.yml") - - # Now, run the dbt parse command again. - result = runner.invoke(["parse"]) - assert result.success - - # Finally, verify that the manifest reflects the partially parsed change - manifest = result.result - saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] - assert len(saved_query.metrics) == 1 - assert saved_query.metrics[0] == "txn_revenue" - - def test_saved_query_deleted_partial_parsing(self, project): - # First, use the default saved_queries.yml to define our saved_query, and - # run the dbt parse command - runner = dbtTestRunner() - result = runner.invoke(["parse"]) - assert result.success - assert "saved_query.test.test_saved_query" in result.result.saved_queries - - # Next, modify the default saved_queries.yml to remove the saved query. - write_file("", project.project_root, "models", "saved_queries.yml") - - # Now, run the dbt parse command again. - result = runner.invoke(["parse"]) - assert result.success - - # Finally, verify that the manifest reflects the deletion - assert "saved_query.test.test_saved_query" not in result.result.saved_queries From dd4d8dade6908fc2ddf21eb39b8abbe4693d15ad Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 27 Feb 2024 18:33:01 -0500 Subject: [PATCH 037/114] Update dependencies for 1.0.0b1 (#27) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 6b67b91aa..e7e283c25 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", ] dependencies = [ - "dbt-adapters>=0.1.0a6,<0.2.0", + "dbt-adapters>=0.1.0a1,<2.0", "psycopg2>=2.9,<3.0", # installed via dbt-adapters but used directly "dbt-common<2.0", From c2e7104a6405f166b9ceb871ca7edbcbf3602824 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 27 Feb 2024 19:10:23 -0500 Subject: [PATCH 038/114] Fix testing migration (#21) --- pyproject.toml | 2 +- .../functional/shared_tests/test_column_types.py | 4 ++-- .../shared_tests/test_hooks/data/seed_model.sql | 16 ++++++++++++++++ .../shared_tests/test_hooks/data/seed_run.sql | 16 ++++++++++++++++ .../shared_tests/{ => test_hooks}/test_hooks.py | 4 ++++ .../{ => test_simple_seed}/seed_bom.csv | 0 .../{ => test_simple_seed}/test_simple_seed.py | 4 ++++ 7 files changed, 43 insertions(+), 3 deletions(-) create mode 100644 tests/functional/shared_tests/test_hooks/data/seed_model.sql create mode 100644 tests/functional/shared_tests/test_hooks/data/seed_run.sql rename tests/functional/shared_tests/{ => test_hooks}/test_hooks.py (92%) rename tests/functional/shared_tests/{ => test_simple_seed}/seed_bom.csv (100%) rename tests/functional/shared_tests/{ => test_simple_seed}/test_simple_seed.py (89%) diff --git a/pyproject.toml b/pyproject.toml index e7e283c25..acef4e330 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ dependencies = [ "dbt-adapters>=0.1.0a1,<2.0", "psycopg2>=2.9,<3.0", # installed via dbt-adapters but used directly - "dbt-common<2.0", + "dbt-common>=0.1.0a1,<2.0", "agate>=1.0,<2.0", ] [project.urls] diff --git a/tests/functional/shared_tests/test_column_types.py b/tests/functional/shared_tests/test_column_types.py index f5037860b..47dec2454 100644 --- a/tests/functional/shared_tests/test_column_types.py +++ b/tests/functional/shared_tests/test_column_types.py @@ -1,5 +1,5 @@ -from dbt.tests.adapter.column_types.test_column_types import BaseColumnTypes +from dbt.tests.adapter.column_types.test_column_types import BasePostgresColumnTypes -class TestPostgresColumnTypes(BaseColumnTypes): +class TestPostgresColumnTypes(BasePostgresColumnTypes): pass diff --git a/tests/functional/shared_tests/test_hooks/data/seed_model.sql b/tests/functional/shared_tests/test_hooks/data/seed_model.sql new file mode 100644 index 000000000..6727acb33 --- /dev/null +++ b/tests/functional/shared_tests/test_hooks/data/seed_model.sql @@ -0,0 +1,16 @@ +drop table if exists {schema}.on_model_hook; + +create table {schema}.on_model_hook ( + test_state TEXT, -- start|end + target_dbname TEXT, + target_host TEXT, + target_name TEXT, + target_schema TEXT, + target_type TEXT, + target_user TEXT, + target_pass TEXT, + target_threads INTEGER, + run_started_at TEXT, + invocation_id TEXT, + thread_id TEXT +); diff --git a/tests/functional/shared_tests/test_hooks/data/seed_run.sql b/tests/functional/shared_tests/test_hooks/data/seed_run.sql new file mode 100644 index 000000000..079ed34a2 --- /dev/null +++ b/tests/functional/shared_tests/test_hooks/data/seed_run.sql @@ -0,0 +1,16 @@ +drop table if exists {schema}.on_run_hook; + +create table {schema}.on_run_hook ( + test_state TEXT, -- start|end + target_dbname TEXT, + target_host TEXT, + target_name TEXT, + target_schema TEXT, + target_type TEXT, + target_user TEXT, + target_pass TEXT, + target_threads INTEGER, + run_started_at TEXT, + invocation_id TEXT, + thread_id TEXT +); diff --git a/tests/functional/shared_tests/test_hooks.py b/tests/functional/shared_tests/test_hooks/test_hooks.py similarity index 92% rename from tests/functional/shared_tests/test_hooks.py rename to tests/functional/shared_tests/test_hooks/test_hooks.py index 3db138eb7..843811307 100644 --- a/tests/functional/shared_tests/test_hooks.py +++ b/tests/functional/shared_tests/test_hooks/test_hooks.py @@ -1,3 +1,7 @@ +""" +This file needs to be in its own directory because it uses a `data` directory. +Placing this file in its own directory avoids collisions. +""" from dbt.tests.adapter.hooks.test_model_hooks import ( BasePrePostModelHooks, BaseHookRefs, diff --git a/tests/functional/shared_tests/seed_bom.csv b/tests/functional/shared_tests/test_simple_seed/seed_bom.csv similarity index 100% rename from tests/functional/shared_tests/seed_bom.csv rename to tests/functional/shared_tests/test_simple_seed/seed_bom.csv diff --git a/tests/functional/shared_tests/test_simple_seed.py b/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py similarity index 89% rename from tests/functional/shared_tests/test_simple_seed.py rename to tests/functional/shared_tests/test_simple_seed/test_simple_seed.py index 97b8870a1..cd8497883 100644 --- a/tests/functional/shared_tests/test_simple_seed.py +++ b/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py @@ -1,3 +1,7 @@ +""" +This file needs to be in its own directory because it creates a `data` directory at run time. +Placing this file in its own directory avoids collisions. +""" from dbt.tests.adapter.simple_seed.test_seed import ( BaseBasicSeedTests, BaseSeedConfigFullRefreshOn, From ce32e05bc35060cd97f334c44b5841ddefc9bddd Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 1 Mar 2024 17:20:24 -0500 Subject: [PATCH 039/114] [Version bump] 1.8.0b1 (#30) --- dbt/adapters/postgres/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index f15b401d1..6496f3e22 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.8.0a1" +version = "1.8.0b1" From 59103164c58afeefcf209f56df2ed38a714e74c5 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 1 Mar 2024 18:10:10 -0500 Subject: [PATCH 040/114] Use appropriate archive name (#31) --- .github/workflows/release.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ccf805f5b..695424658 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -37,10 +37,20 @@ jobs: - name: Setup `hatch` uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + - name: Inputs + id: release-inputs + run: | + version=$(hatch version) + archive_name=dbt-postgres-$version-${{ inputs.deploy-to }} + echo "archive-name=$archive_name" >> $GITHUB_OUTPUT + - name: Build `dbt-postgres` uses: dbt-labs/dbt-adapters/.github/actions/build-hatch@main + with: + archive-name: ${{ steps.release-inputs.outputs.archive-name }} - name: Publish to PyPI uses: dbt-labs/dbt-adapters/.github/actions/publish-pypi@main with: pypi-repository-url: ${{ vars.PYPI_REPOSITORY_URL }} + archive-name: ${{ steps.release-inputs.outputs.archive-name }} From 03e39d8fcdf5caa931019411142bf1dbef114502 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 19 Mar 2024 12:28:36 -0700 Subject: [PATCH 041/114] Add dummy file. (#33) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .github/workflows/release-internal.yml | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/workflows/release-internal.yml diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml new file mode 100644 index 000000000..d868682ac --- /dev/null +++ b/.github/workflows/release-internal.yml @@ -0,0 +1 @@ +# this is an empty file From c18585f33986ce605691fc38dab1576e351bc875 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Wed, 20 Mar 2024 13:48:48 -0700 Subject: [PATCH 042/114] Add release internal workflow (#35) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .github/workflows/release-internal.yml | 55 +++++++++++++++++++++++++- 1 file changed, 54 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml index d868682ac..d560787ee 100644 --- a/.github/workflows/release-internal.yml +++ b/.github/workflows/release-internal.yml @@ -1 +1,54 @@ -# this is an empty file +# What? +# +# Send a sha as a fully fledged relase to an internal archive for further processing. +# +# How? +# +# Checkout the sha +# Test it +# Build it +# Upload it +# +# When? +# +# Manual trigger + +name: Release internal patch + +on: + workflow_dispatch: + inputs: + version_number: + description: "The release version number (i.e. 1.0.0b1)" + type: string + required: true + sha: + description: "The sha to use (leave empty to use latest on main)" + type: string + required: false + package_test_command: + description: "Package test command" + type: string + default: "python -c \"import dbt.adapters.postgres\"" + required: true + +defaults: + run: + shell: "bash" + +env: + PYTHON_TARGET_VERSION: 3.11 + +jobs: + invoke-reusable-workflow: + name: "Build and Release Internally" + + uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@mp/finish_internal_workflow" + + with: + version_number: "${{ inputs.version_number }}" + package_test_command: "${{ inputs.package_test_command }}" + dbms_name: "postgres" + sha: "${{ inputs.sha }}" + + secrets: "inherit" From dd574db53be885d9cfa746008657463372882505 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 20 Mar 2024 17:40:36 -0400 Subject: [PATCH 043/114] Remove tests inadvertently migrated during the core-adapter de-coupling (#34) --- tests/functional/dependencies/data/seed.sql | 586 --------------- tests/functional/dependencies/data/update.sql | 7 - .../duplicate_dependency/dbt_project.yml | 5 - .../early_hook_dependency/dbt_project.yml | 6 - .../late_hook_dependency/dbt_project.yml | 6 - .../local_dependency/dbt_project.yml | 23 - .../local_dependency/macros/dep_macro.sql | 3 - .../macros/generate_schema_name.sql | 15 - .../models/model_to_import.sql | 1 - .../local_dependency/models/schema.yml | 11 - .../local_dependency/seeds/seed.csv | 2 - .../models_local/dep_source_model.sql | 2 - .../models_local/my_configured_model.sql | 4 - .../dependencies/models_local/my_model.sql | 2 - .../dependencies/models_local/schema.yml | 7 - .../models_local/source_override_model.sql | 2 - .../dependencies/test_dependency_options.py | 106 --- .../dependencies/test_local_dependency.py | 356 ---------- .../dependencies/test_simple_dependency.py | 435 ------------ .../test_simple_dependency_with_configs.py | 106 --- tests/functional/metrics/fixtures.py | 666 ------------------ .../functional/metrics/test_metric_configs.py | 206 ------ .../metrics/test_metric_deferral.py | 83 --- .../metrics/test_metric_helper_functions.py | 53 -- tests/functional/metrics/test_metrics.py | 399 ----------- 25 files changed, 3092 deletions(-) delete mode 100644 tests/functional/dependencies/data/seed.sql delete mode 100644 tests/functional/dependencies/data/update.sql delete mode 100644 tests/functional/dependencies/duplicate_dependency/dbt_project.yml delete mode 100644 tests/functional/dependencies/early_hook_dependency/dbt_project.yml delete mode 100644 tests/functional/dependencies/late_hook_dependency/dbt_project.yml delete mode 100644 tests/functional/dependencies/local_dependency/dbt_project.yml delete mode 100644 tests/functional/dependencies/local_dependency/macros/dep_macro.sql delete mode 100644 tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql delete mode 100644 tests/functional/dependencies/local_dependency/models/model_to_import.sql delete mode 100644 tests/functional/dependencies/local_dependency/models/schema.yml delete mode 100644 tests/functional/dependencies/local_dependency/seeds/seed.csv delete mode 100644 tests/functional/dependencies/models_local/dep_source_model.sql delete mode 100644 tests/functional/dependencies/models_local/my_configured_model.sql delete mode 100644 tests/functional/dependencies/models_local/my_model.sql delete mode 100644 tests/functional/dependencies/models_local/schema.yml delete mode 100644 tests/functional/dependencies/models_local/source_override_model.sql delete mode 100644 tests/functional/dependencies/test_dependency_options.py delete mode 100644 tests/functional/dependencies/test_local_dependency.py delete mode 100644 tests/functional/dependencies/test_simple_dependency.py delete mode 100644 tests/functional/dependencies/test_simple_dependency_with_configs.py delete mode 100644 tests/functional/metrics/fixtures.py delete mode 100644 tests/functional/metrics/test_metric_configs.py delete mode 100644 tests/functional/metrics/test_metric_deferral.py delete mode 100644 tests/functional/metrics/test_metric_helper_functions.py delete mode 100644 tests/functional/metrics/test_metrics.py diff --git a/tests/functional/dependencies/data/seed.sql b/tests/functional/dependencies/data/seed.sql deleted file mode 100644 index b74c3b35e..000000000 --- a/tests/functional/dependencies/data/seed.sql +++ /dev/null @@ -1,586 +0,0 @@ -create table {schema}.seed ( - id INTEGER, - first_name VARCHAR(11), - email VARCHAR(31), - ip_address VARCHAR(15), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed - ("id","first_name","email","ip_address","updated_at") -VALUES - (1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), - (2,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), - (3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), - (4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), - (5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), - (6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), - (7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), - (8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), - (9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), - (10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), - (11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), - (12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), - (13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), - (14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), - (15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), - (16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), - (17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), - (18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), - (19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), - (20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), - (21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), - (22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), - (23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), - (24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), - (25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), - (26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), - (27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), - (28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), - (29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), - (30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), - (31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), - (32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), - (33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), - (34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), - (35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), - (36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), - (37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), - (38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), - (39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), - (40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), - (41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), - (42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), - (43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), - (44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), - (45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), - (46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), - (47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), - (48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), - (49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), - (50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), - (51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), - (52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), - (53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), - (54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), - (55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), - (56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), - (57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), - (58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), - (59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), - (60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), - (61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), - (62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), - (63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), - (64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), - (65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), - (66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), - (67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), - (68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), - (69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), - (70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), - (71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), - (72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), - (73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), - (74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), - (75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), - (76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), - (77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), - (78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), - (79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), - (80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), - (81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), - (82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), - (83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), - (84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), - (85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), - (86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), - (87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), - (88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), - (89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), - (90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), - (91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), - (92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), - (93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), - (94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), - (95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), - (96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), - (97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), - (98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), - (99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), - (100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'), - (101,'Deborah','dknight2s@reverbnation.com','222.131.211.191','1970-07-08 08:54:23'), - (102,'Sandra','sblack2t@tripadvisor.com','254.183.128.254','2000-04-12 02:39:36'), - (103,'Edward','eburns2u@dailymotion.com','253.89.118.18','1993-10-10 10:54:01'), - (104,'Anthony','ayoung2v@ustream.tv','118.4.193.176','1978-08-26 17:07:29'), - (105,'Donald','dlawrence2w@wp.com','139.200.159.227','2007-07-21 20:56:20'), - (106,'Matthew','mfreeman2x@google.fr','205.26.239.92','2014-12-05 17:05:39'), - (107,'Sean','ssanders2y@trellian.com','143.89.82.108','1993-07-14 21:45:02'), - (108,'Sharon','srobinson2z@soundcloud.com','66.234.247.54','1977-04-06 19:07:03'), - (109,'Jennifer','jwatson30@t-online.de','196.102.127.7','1998-03-07 05:12:23'), - (110,'Clarence','cbrooks31@si.edu','218.93.234.73','2002-11-06 17:22:25'), - (111,'Jose','jflores32@goo.gl','185.105.244.231','1995-01-05 06:32:21'), - (112,'George','glee33@adobe.com','173.82.249.196','2015-01-04 02:47:46'), - (113,'Larry','lhill34@linkedin.com','66.5.206.195','2010-11-02 10:21:17'), - (114,'Marie','mmeyer35@mysql.com','151.152.88.107','1990-05-22 20:52:51'), - (115,'Clarence','cwebb36@skype.com','130.198.55.217','1972-10-27 07:38:54'), - (116,'Sarah','scarter37@answers.com','80.89.18.153','1971-08-24 19:29:30'), - (117,'Henry','hhughes38@webeden.co.uk','152.60.114.174','1973-01-27 09:00:42'), - (118,'Teresa','thenry39@hao123.com','32.187.239.106','2015-11-06 01:48:44'), - (119,'Billy','bgutierrez3a@sun.com','52.37.70.134','2002-03-19 03:20:19'), - (120,'Anthony','agibson3b@github.io','154.251.232.213','1991-04-19 01:08:15'), - (121,'Sandra','sromero3c@wikia.com','44.124.171.2','1998-09-06 20:30:34'), - (122,'Paula','pandrews3d@blogs.com','153.142.118.226','2003-06-24 16:31:24'), - (123,'Terry','tbaker3e@csmonitor.com','99.120.45.219','1970-12-09 23:57:21'), - (124,'Lois','lwilson3f@reuters.com','147.44.171.83','1971-01-09 22:28:51'), - (125,'Sara','smorgan3g@nature.com','197.67.192.230','1992-01-28 20:33:24'), - (126,'Charles','ctorres3h@china.com.cn','156.115.216.2','1993-10-02 19:36:34'), - (127,'Richard','ralexander3i@marriott.com','248.235.180.59','1999-02-03 18:40:55'), - (128,'Christina','charper3j@cocolog-nifty.com','152.114.116.129','1978-09-13 00:37:32'), - (129,'Steve','sadams3k@economist.com','112.248.91.98','2004-03-21 09:07:43'), - (130,'Katherine','krobertson3l@ow.ly','37.220.107.28','1977-03-18 19:28:50'), - (131,'Donna','dgibson3m@state.gov','222.218.76.221','1999-02-01 06:46:16'), - (132,'Christina','cwest3n@mlb.com','152.114.6.160','1979-12-24 15:30:35'), - (133,'Sandra','swillis3o@meetup.com','180.71.49.34','1984-09-27 08:05:54'), - (134,'Clarence','cedwards3p@smugmug.com','10.64.180.186','1979-04-16 16:52:10'), - (135,'Ruby','rjames3q@wp.com','98.61.54.20','2007-01-13 14:25:52'), - (136,'Sarah','smontgomery3r@tripod.com','91.45.164.172','2009-07-25 04:34:30'), - (137,'Sarah','soliver3s@eventbrite.com','30.106.39.146','2012-05-09 22:12:33'), - (138,'Deborah','dwheeler3t@biblegateway.com','59.105.213.173','1999-11-09 08:08:44'), - (139,'Deborah','dray3u@i2i.jp','11.108.186.217','2014-02-04 03:15:19'), - (140,'Paul','parmstrong3v@alexa.com','6.250.59.43','2009-12-21 10:08:53'), - (141,'Aaron','abishop3w@opera.com','207.145.249.62','1996-04-25 23:20:23'), - (142,'Henry','hsanders3x@google.ru','140.215.203.171','2012-01-29 11:52:32'), - (143,'Anne','aanderson3y@1688.com','74.150.102.118','1982-04-03 13:46:17'), - (144,'Victor','vmurphy3z@hugedomains.com','222.155.99.152','1987-11-03 19:58:41'), - (145,'Evelyn','ereid40@pbs.org','249.122.33.117','1977-12-14 17:09:57'), - (146,'Brian','bgonzalez41@wikia.com','246.254.235.141','1991-02-24 00:45:58'), - (147,'Sandra','sgray42@squarespace.com','150.73.28.159','1972-07-28 17:26:32'), - (148,'Alice','ajones43@a8.net','78.253.12.177','2002-12-05 16:57:46'), - (149,'Jessica','jhanson44@mapquest.com','87.229.30.160','1994-01-30 11:40:04'), - (150,'Louise','lbailey45@reuters.com','191.219.31.101','2011-09-07 21:11:45'), - (151,'Christopher','cgonzalez46@printfriendly.com','83.137.213.239','1984-10-24 14:58:04'), - (152,'Gregory','gcollins47@yandex.ru','28.176.10.115','1998-07-25 17:17:10'), - (153,'Jane','jperkins48@usnews.com','46.53.164.159','1979-08-19 15:25:00'), - (154,'Phyllis','plong49@yahoo.co.jp','208.140.88.2','1985-07-06 02:16:36'), - (155,'Adam','acarter4a@scribd.com','78.48.148.204','2005-07-20 03:31:09'), - (156,'Frank','fweaver4b@angelfire.com','199.180.255.224','2011-03-04 23:07:54'), - (157,'Ronald','rmurphy4c@cloudflare.com','73.42.97.231','1991-01-11 10:39:41'), - (158,'Richard','rmorris4d@e-recht24.de','91.9.97.223','2009-01-17 21:05:15'), - (159,'Rose','rfoster4e@woothemes.com','203.169.53.16','1991-04-21 02:09:38'), - (160,'George','ggarrett4f@uiuc.edu','186.61.5.167','1989-11-11 11:29:42'), - (161,'Victor','vhamilton4g@biblegateway.com','121.229.138.38','2012-06-22 18:01:23'), - (162,'Mark','mbennett4h@businessinsider.com','209.184.29.203','1980-04-16 15:26:34'), - (163,'Martin','mwells4i@ifeng.com','97.223.55.105','2010-05-26 14:08:18'), - (164,'Diana','dstone4j@google.ru','90.155.52.47','2013-02-11 00:14:54'), - (165,'Walter','wferguson4k@blogger.com','30.63.212.44','1986-02-20 17:46:46'), - (166,'Denise','dcoleman4l@vistaprint.com','10.209.153.77','1992-05-13 20:14:14'), - (167,'Philip','pknight4m@xing.com','15.28.135.167','2000-09-11 18:41:13'), - (168,'Russell','rcarr4n@youtube.com','113.55.165.50','2008-07-10 17:49:27'), - (169,'Donna','dburke4o@dion.ne.jp','70.0.105.111','1992-02-10 17:24:58'), - (170,'Anne','along4p@squidoo.com','36.154.58.107','2012-08-19 23:35:31'), - (171,'Clarence','cbanks4q@webeden.co.uk','94.57.53.114','1972-03-11 21:46:44'), - (172,'Betty','bbowman4r@cyberchimps.com','178.115.209.69','2013-01-13 21:34:51'), - (173,'Andrew','ahudson4s@nytimes.com','84.32.252.144','1998-09-15 14:20:04'), - (174,'Keith','kgordon4t@cam.ac.uk','189.237.211.102','2009-01-22 05:34:38'), - (175,'Patrick','pwheeler4u@mysql.com','47.22.117.226','1984-09-05 22:33:15'), - (176,'Jesse','jfoster4v@mapquest.com','229.95.131.46','1990-01-20 12:19:15'), - (177,'Arthur','afisher4w@jugem.jp','107.255.244.98','1983-10-13 11:08:46'), - (178,'Nicole','nryan4x@wsj.com','243.211.33.221','1974-05-30 23:19:14'), - (179,'Bruce','bjohnson4y@sfgate.com','17.41.200.101','1992-09-23 02:02:19'), - (180,'Terry','tcox4z@reference.com','20.189.120.106','1982-02-13 12:43:14'), - (181,'Ashley','astanley50@kickstarter.com','86.3.56.98','1976-05-09 01:27:16'), - (182,'Michael','mrivera51@about.me','72.118.249.0','1971-11-11 17:28:37'), - (183,'Steven','sgonzalez52@mozilla.org','169.112.247.47','2002-08-24 14:59:25'), - (184,'Kathleen','kfuller53@bloglovin.com','80.93.59.30','2002-03-11 13:41:29'), - (185,'Nicole','nhenderson54@usda.gov','39.253.60.30','1995-04-24 05:55:07'), - (186,'Ralph','rharper55@purevolume.com','167.147.142.189','1980-02-10 18:35:45'), - (187,'Heather','hcunningham56@photobucket.com','96.222.196.229','2007-06-15 05:37:50'), - (188,'Nancy','nlittle57@cbc.ca','241.53.255.175','2007-07-12 23:42:48'), - (189,'Juan','jramirez58@pinterest.com','190.128.84.27','1978-11-07 23:37:37'), - (190,'Beverly','bfowler59@chronoengine.com','54.144.230.49','1979-03-31 23:27:28'), - (191,'Shirley','sstevens5a@prlog.org','200.97.231.248','2011-12-06 07:08:50'), - (192,'Annie','areyes5b@squidoo.com','223.32.182.101','2011-05-28 02:42:09'), - (193,'Jack','jkelley5c@tiny.cc','47.34.118.150','1981-12-05 17:31:40'), - (194,'Keith','krobinson5d@1und1.de','170.210.209.31','1999-03-09 11:05:43'), - (195,'Joseph','jmiller5e@google.com.au','136.74.212.139','1984-10-08 13:18:20'), - (196,'Annie','aday5f@blogspot.com','71.99.186.69','1986-02-18 12:27:34'), - (197,'Nancy','nperez5g@liveinternet.ru','28.160.6.107','1983-10-20 17:51:20'), - (198,'Tammy','tward5h@ucoz.ru','141.43.164.70','1980-03-31 04:45:29'), - (199,'Doris','dryan5i@ted.com','239.117.202.188','1985-07-03 03:17:53'), - (200,'Rose','rmendoza5j@photobucket.com','150.200.206.79','1973-04-21 21:36:40'), - (201,'Cynthia','cbutler5k@hubpages.com','80.153.174.161','2001-01-20 01:42:26'), - (202,'Samuel','soliver5l@people.com.cn','86.127.246.140','1970-09-02 02:19:00'), - (203,'Carl','csanchez5m@mysql.com','50.149.237.107','1993-12-01 07:02:09'), - (204,'Kathryn','kowens5n@geocities.jp','145.166.205.201','2004-07-06 18:39:33'), - (205,'Nicholas','nnichols5o@parallels.com','190.240.66.170','2014-11-11 18:52:19'), - (206,'Keith','kwillis5p@youtube.com','181.43.206.100','1998-06-13 06:30:51'), - (207,'Justin','jwebb5q@intel.com','211.54.245.74','2000-11-04 16:58:26'), - (208,'Gary','ghicks5r@wikipedia.org','196.154.213.104','1992-12-01 19:48:28'), - (209,'Martin','mpowell5s@flickr.com','153.67.12.241','1983-06-30 06:24:32'), - (210,'Brenda','bkelley5t@xinhuanet.com','113.100.5.172','2005-01-08 20:50:22'), - (211,'Edward','eray5u@a8.net','205.187.246.65','2011-09-26 08:04:44'), - (212,'Steven','slawson5v@senate.gov','238.150.250.36','1978-11-22 02:48:09'), - (213,'Robert','rthompson5w@furl.net','70.7.89.236','2001-09-12 08:52:07'), - (214,'Jack','jporter5x@diigo.com','220.172.29.99','1976-07-26 14:29:21'), - (215,'Lisa','ljenkins5y@oakley.com','150.151.170.180','2010-03-20 19:21:16'), - (216,'Theresa','tbell5z@mayoclinic.com','247.25.53.173','2001-03-11 05:36:40'), - (217,'Jimmy','jstephens60@weather.com','145.101.93.235','1983-04-12 09:35:30'), - (218,'Louis','lhunt61@amazon.co.jp','78.137.6.253','1997-08-29 19:34:34'), - (219,'Lawrence','lgilbert62@ted.com','243.132.8.78','2015-04-08 22:06:56'), - (220,'David','dgardner63@4shared.com','204.40.46.136','1971-07-09 03:29:11'), - (221,'Charles','ckennedy64@gmpg.org','211.83.233.2','2011-02-26 11:55:04'), - (222,'Lillian','lbanks65@msu.edu','124.233.12.80','2010-05-16 20:29:02'), - (223,'Ernest','enguyen66@baidu.com','82.45.128.148','1996-07-04 10:07:04'), - (224,'Ryan','rrussell67@cloudflare.com','202.53.240.223','1983-08-05 12:36:29'), - (225,'Donald','ddavis68@ustream.tv','47.39.218.137','1989-05-27 02:30:56'), - (226,'Joe','jscott69@blogspot.com','140.23.131.75','1973-03-16 12:21:31'), - (227,'Anne','amarshall6a@google.ca','113.162.200.197','1988-12-09 03:38:29'), - (228,'Willie','wturner6b@constantcontact.com','85.83.182.249','1991-10-06 01:51:10'), - (229,'Nicole','nwilson6c@sogou.com','30.223.51.135','1977-05-29 19:54:56'), - (230,'Janet','jwheeler6d@stumbleupon.com','153.194.27.144','2011-03-13 12:48:47'), - (231,'Lois','lcarr6e@statcounter.com','0.41.36.53','1993-02-06 04:52:01'), - (232,'Shirley','scruz6f@tmall.com','37.156.39.223','2007-02-18 17:47:01'), - (233,'Patrick','pford6g@reverbnation.com','36.198.200.89','1977-03-06 15:47:24'), - (234,'Lisa','lhudson6h@usatoday.com','134.213.58.137','2014-10-28 01:56:56'), - (235,'Pamela','pmartinez6i@opensource.org','5.151.127.202','1987-11-30 16:44:47'), - (236,'Larry','lperez6j@infoseek.co.jp','235.122.96.148','1979-01-18 06:33:45'), - (237,'Pamela','pramirez6k@census.gov','138.233.34.163','2012-01-29 10:35:20'), - (238,'Daniel','dcarr6l@php.net','146.21.152.242','1984-11-17 08:22:59'), - (239,'Patrick','psmith6m@indiegogo.com','136.222.199.36','2001-05-30 22:16:44'), - (240,'Raymond','rhenderson6n@hc360.com','116.31.112.38','2000-01-05 20:35:41'), - (241,'Teresa','treynolds6o@miitbeian.gov.cn','198.126.205.220','1996-11-08 01:27:31'), - (242,'Johnny','jmason6p@flickr.com','192.8.232.114','2013-05-14 05:35:50'), - (243,'Angela','akelly6q@guardian.co.uk','234.116.60.197','1977-08-20 02:05:17'), - (244,'Douglas','dcole6r@cmu.edu','128.135.212.69','2016-10-26 17:40:36'), - (245,'Frances','fcampbell6s@twitpic.com','94.22.243.235','1987-04-26 07:07:13'), - (246,'Donna','dgreen6t@chron.com','227.116.46.107','2011-07-25 12:59:54'), - (247,'Benjamin','bfranklin6u@redcross.org','89.141.142.89','1974-05-03 20:28:18'), - (248,'Randy','rpalmer6v@rambler.ru','70.173.63.178','2011-12-20 17:40:18'), - (249,'Melissa','mmurray6w@bbb.org','114.234.118.137','1991-02-26 12:45:44'), - (250,'Jean','jlittle6x@epa.gov','141.21.163.254','1991-08-16 04:57:09'), - (251,'Daniel','dolson6y@nature.com','125.75.104.97','2010-04-23 06:25:54'), - (252,'Kathryn','kwells6z@eventbrite.com','225.104.28.249','2015-01-31 02:21:50'), - (253,'Theresa','tgonzalez70@ox.ac.uk','91.93.156.26','1971-12-11 10:31:31'), - (254,'Beverly','broberts71@bluehost.com','244.40.158.89','2013-09-21 13:02:31'), - (255,'Pamela','pmurray72@netscape.com','218.54.95.216','1985-04-16 00:34:00'), - (256,'Timothy','trichardson73@amazonaws.com','235.49.24.229','2000-11-11 09:48:28'), - (257,'Mildred','mpalmer74@is.gd','234.125.95.132','1992-05-25 02:25:02'), - (258,'Jessica','jcampbell75@google.it','55.98.30.140','2014-08-26 00:26:34'), - (259,'Beverly','bthomas76@cpanel.net','48.78.228.176','1970-08-18 10:40:05'), - (260,'Eugene','eward77@cargocollective.com','139.226.204.2','1996-12-04 23:17:00'), - (261,'Andrea','aallen78@webnode.com','160.31.214.38','2009-07-06 07:22:37'), - (262,'Justin','jruiz79@merriam-webster.com','150.149.246.122','2005-06-06 11:44:19'), - (263,'Kenneth','kedwards7a@networksolutions.com','98.82.193.128','2001-07-03 02:00:10'), - (264,'Rachel','rday7b@miibeian.gov.cn','114.15.247.221','1994-08-18 19:45:40'), - (265,'Russell','rmiller7c@instagram.com','184.130.152.253','1977-11-06 01:58:12'), - (266,'Bonnie','bhudson7d@cornell.edu','235.180.186.206','1990-12-03 22:45:24'), - (267,'Raymond','rknight7e@yandex.ru','161.2.44.252','1995-08-25 04:31:19'), - (268,'Bonnie','brussell7f@elpais.com','199.237.57.207','1991-03-29 08:32:06'), - (269,'Marie','mhenderson7g@elpais.com','52.203.131.144','2004-06-04 21:50:28'), - (270,'Alan','acarr7h@trellian.com','147.51.205.72','2005-03-03 10:51:31'), - (271,'Barbara','bturner7i@hugedomains.com','103.160.110.226','2004-08-04 13:42:40'), - (272,'Christina','cdaniels7j@census.gov','0.238.61.251','1972-10-18 12:47:33'), - (273,'Jeremy','jgomez7k@reuters.com','111.26.65.56','2013-01-13 10:41:35'), - (274,'Laura','lwood7l@icio.us','149.153.38.205','2011-06-25 09:33:59'), - (275,'Matthew','mbowman7m@auda.org.au','182.138.206.172','1999-03-05 03:25:36'), - (276,'Denise','dparker7n@icq.com','0.213.88.138','2011-11-04 09:43:06'), - (277,'Phillip','pparker7o@discuz.net','219.242.165.240','1973-10-19 04:22:29'), - (278,'Joan','jpierce7p@salon.com','63.31.213.202','1989-04-09 22:06:24'), - (279,'Irene','ibaker7q@cbc.ca','102.33.235.114','1992-09-04 13:00:57'), - (280,'Betty','bbowman7r@ted.com','170.91.249.242','2015-09-28 08:14:22'), - (281,'Teresa','truiz7s@boston.com','82.108.158.207','1999-07-18 05:17:09'), - (282,'Helen','hbrooks7t@slideshare.net','102.87.162.187','2003-01-06 15:45:29'), - (283,'Karen','kgriffin7u@wunderground.com','43.82.44.184','2010-05-28 01:56:37'), - (284,'Lisa','lfernandez7v@mtv.com','200.238.218.220','1993-04-03 20:33:51'), - (285,'Jesse','jlawrence7w@timesonline.co.uk','95.122.105.78','1990-01-05 17:28:43'), - (286,'Terry','tross7x@macromedia.com','29.112.114.133','2009-08-29 21:32:17'), - (287,'Angela','abradley7y@icq.com','177.44.27.72','1989-10-04 21:46:06'), - (288,'Maria','mhart7z@dailymotion.com','55.27.55.202','1975-01-21 01:22:57'), - (289,'Raymond','randrews80@pinterest.com','88.90.78.67','1992-03-16 21:37:40'), - (290,'Kathy','krice81@bluehost.com','212.63.196.102','2000-12-14 03:06:44'), - (291,'Cynthia','cramos82@nymag.com','107.89.190.6','2005-06-28 02:02:33'), - (292,'Kimberly','kjones83@mysql.com','86.169.101.101','2007-06-13 22:56:49'), - (293,'Timothy','thansen84@microsoft.com','108.100.254.90','2003-04-04 10:31:57'), - (294,'Carol','cspencer85@berkeley.edu','75.118.144.187','1999-03-30 14:53:21'), - (295,'Louis','lmedina86@latimes.com','141.147.163.24','1991-04-11 17:53:13'), - (296,'Margaret','mcole87@google.fr','53.184.26.83','1991-12-19 01:54:10'), - (297,'Mary','mgomez88@yellowpages.com','208.56.57.99','1976-05-21 18:05:08'), - (298,'Amanda','aanderson89@geocities.com','147.73.15.252','1987-08-22 15:05:28'), - (299,'Kathryn','kgarrett8a@nature.com','27.29.177.220','1976-07-15 04:25:04'), - (300,'Dorothy','dmason8b@shareasale.com','106.210.99.193','1990-09-03 21:39:31'), - (301,'Lois','lkennedy8c@amazon.de','194.169.29.187','2007-07-29 14:09:31'), - (302,'Irene','iburton8d@washingtonpost.com','196.143.110.249','2013-09-05 11:32:46'), - (303,'Betty','belliott8e@wired.com','183.105.222.199','1979-09-19 19:29:13'), - (304,'Bobby','bmeyer8f@census.gov','36.13.161.145','2014-05-24 14:34:39'), - (305,'Ann','amorrison8g@sfgate.com','72.154.54.137','1978-10-05 14:22:34'), - (306,'Daniel','djackson8h@wunderground.com','144.95.32.34','1990-07-27 13:23:05'), - (307,'Joe','jboyd8i@alibaba.com','187.105.86.178','2011-09-28 16:46:32'), - (308,'Ralph','rdunn8j@fc2.com','3.19.87.255','1984-10-18 08:00:40'), - (309,'Craig','ccarter8k@gizmodo.com','235.152.76.215','1998-07-04 12:15:21'), - (310,'Paula','pdean8l@hhs.gov','161.100.173.197','1973-02-13 09:38:55'), - (311,'Andrew','agarrett8m@behance.net','199.253.123.218','1991-02-14 13:36:32'), - (312,'Janet','jhowell8n@alexa.com','39.189.139.79','2012-11-24 20:17:33'), - (313,'Keith','khansen8o@godaddy.com','116.186.223.196','1987-08-23 21:22:05'), - (314,'Nicholas','nedwards8p@state.gov','142.175.142.11','1977-03-28 18:27:27'), - (315,'Jacqueline','jallen8q@oaic.gov.au','189.66.135.192','1994-10-26 11:44:26'), - (316,'Frank','fgardner8r@mapy.cz','154.77.119.169','1983-01-29 19:19:51'), - (317,'Eric','eharrison8s@google.cn','245.139.65.123','1984-02-04 09:54:36'), - (318,'Gregory','gcooper8t@go.com','171.147.0.221','2004-06-14 05:22:08'), - (319,'Jean','jfreeman8u@rakuten.co.jp','67.243.121.5','1977-01-07 18:23:43'), - (320,'Juan','jlewis8v@shinystat.com','216.181.171.189','2001-08-23 17:32:43'), - (321,'Randy','rwilliams8w@shinystat.com','105.152.146.28','1983-02-17 00:05:50'), - (322,'Stephen','shart8x@sciencedirect.com','196.131.205.148','2004-02-15 10:12:03'), - (323,'Annie','ahunter8y@example.com','63.36.34.103','2003-07-23 21:15:25'), - (324,'Melissa','mflores8z@cbc.ca','151.230.217.90','1983-11-02 14:53:56'), - (325,'Jane','jweaver90@about.me','0.167.235.217','1987-07-29 00:13:44'), - (326,'Anthony','asmith91@oracle.com','97.87.48.41','2001-05-31 18:44:11'), - (327,'Terry','tdavis92@buzzfeed.com','46.20.12.51','2015-09-12 23:13:55'), - (328,'Brandon','bmontgomery93@gravatar.com','252.101.48.186','2010-10-28 08:26:27'), - (329,'Chris','cmurray94@bluehost.com','25.158.167.97','2004-05-05 16:10:31'), - (330,'Denise','dfuller95@hugedomains.com','216.210.149.28','1979-04-20 08:57:24'), - (331,'Arthur','amcdonald96@sakura.ne.jp','206.42.36.213','2009-08-15 03:26:16'), - (332,'Jesse','jhoward97@google.cn','46.181.118.30','1974-04-18 14:08:41'), - (333,'Frank','fsimpson98@domainmarket.com','163.220.211.87','2006-06-30 14:46:52'), - (334,'Janice','jwoods99@pen.io','229.245.237.182','1988-04-06 11:52:58'), - (335,'Rebecca','rroberts9a@huffingtonpost.com','148.96.15.80','1976-10-05 08:44:16'), - (336,'Joshua','jray9b@opensource.org','192.253.12.198','1971-12-25 22:27:07'), - (337,'Joyce','jcarpenter9c@statcounter.com','125.171.46.215','2001-12-31 22:08:13'), - (338,'Andrea','awest9d@privacy.gov.au','79.101.180.201','1983-02-18 20:07:47'), - (339,'Christine','chudson9e@yelp.com','64.198.43.56','1997-09-08 08:03:43'), - (340,'Joe','jparker9f@earthlink.net','251.215.148.153','1973-11-04 05:08:18'), - (341,'Thomas','tkim9g@answers.com','49.187.34.47','1991-08-07 21:13:48'), - (342,'Janice','jdean9h@scientificamerican.com','4.197.117.16','2009-12-08 02:35:49'), - (343,'James','jmitchell9i@umich.edu','43.121.18.147','2011-04-28 17:04:09'), - (344,'Charles','cgardner9j@purevolume.com','197.78.240.240','1998-02-11 06:47:07'), - (345,'Robert','rhenderson9k@friendfeed.com','215.84.180.88','2002-05-10 15:33:14'), - (346,'Chris','cgray9l@4shared.com','249.70.192.240','1998-10-03 16:43:42'), - (347,'Gloria','ghayes9m@hibu.com','81.103.138.26','1999-12-26 11:23:13'), - (348,'Edward','eramirez9n@shareasale.com','38.136.90.136','2010-08-19 08:01:06'), - (349,'Cheryl','cbutler9o@google.ca','172.180.78.172','1995-05-27 20:03:52'), - (350,'Margaret','mwatkins9p@sfgate.com','3.20.198.6','2014-10-21 01:42:58'), - (351,'Rebecca','rwelch9q@examiner.com','45.81.42.208','2001-02-08 12:19:06'), - (352,'Joe','jpalmer9r@phpbb.com','163.202.92.190','1970-01-05 11:29:12'), - (353,'Sandra','slewis9s@dyndns.org','77.215.201.236','1974-01-05 07:04:04'), - (354,'Todd','tfranklin9t@g.co','167.125.181.82','2009-09-28 10:13:58'), - (355,'Joseph','jlewis9u@webmd.com','244.204.6.11','1990-10-21 15:49:57'), - (356,'Alan','aknight9v@nydailynews.com','152.197.95.83','1996-03-08 08:43:17'), - (357,'Sharon','sdean9w@123-reg.co.uk','237.46.40.26','1985-11-30 12:09:24'), - (358,'Annie','awright9x@cafepress.com','190.45.231.111','2000-08-24 11:56:06'), - (359,'Diane','dhamilton9y@youtube.com','85.146.171.196','2015-02-24 02:03:57'), - (360,'Antonio','alane9z@auda.org.au','61.63.146.203','2001-05-13 03:43:34'), - (361,'Matthew','mallena0@hhs.gov','29.97.32.19','1973-02-19 23:43:32'), - (362,'Bonnie','bfowlera1@soup.io','251.216.99.53','2013-08-01 15:35:41'), - (363,'Margaret','mgraya2@examiner.com','69.255.151.79','1998-01-23 22:24:59'), - (364,'Joan','jwagnera3@printfriendly.com','192.166.120.61','1973-07-13 00:30:22'), - (365,'Catherine','cperkinsa4@nytimes.com','58.21.24.214','2006-11-19 11:52:26'), - (366,'Mark','mcartera5@cpanel.net','220.33.102.142','2007-09-09 09:43:27'), - (367,'Paula','ppricea6@msn.com','36.182.238.124','2009-11-11 09:13:05'), - (368,'Catherine','cgreena7@army.mil','228.203.58.19','2005-08-09 16:52:15'), - (369,'Helen','hhamiltona8@symantec.com','155.56.194.99','2005-02-01 05:40:36'), - (370,'Jane','jmeyera9@ezinearticles.com','133.244.113.213','2013-11-06 22:10:23'), - (371,'Wanda','wevansaa@bloglovin.com','233.125.192.48','1994-12-26 23:43:42'), - (372,'Mark','mmarshallab@tumblr.com','114.74.60.47','2016-09-29 18:03:01'), - (373,'Andrew','amartinezac@google.cn','182.54.37.130','1976-06-06 17:04:17'), - (374,'Helen','hmoralesad@e-recht24.de','42.45.4.123','1977-03-28 19:06:59'), - (375,'Bonnie','bstoneae@php.net','196.149.79.137','1970-02-05 17:05:58'), - (376,'Douglas','dfreemanaf@nasa.gov','215.65.124.218','2008-11-20 21:51:55'), - (377,'Willie','wwestag@army.mil','35.189.92.118','1992-07-24 05:08:08'), - (378,'Cheryl','cwagnerah@upenn.edu','228.239.222.141','2010-01-25 06:29:01'), - (379,'Sandra','swardai@baidu.com','63.11.113.240','1985-05-23 08:07:37'), - (380,'Julie','jrobinsonaj@jugem.jp','110.58.202.50','2015-03-05 09:42:07'), - (381,'Larry','lwagnerak@shop-pro.jp','98.234.25.24','1975-07-22 22:22:02'), - (382,'Juan','jcastilloal@yelp.com','24.174.74.202','2007-01-17 09:32:43'), - (383,'Donna','dfrazieram@artisteer.com','205.26.147.45','1990-02-11 20:55:46'), - (384,'Rachel','rfloresan@w3.org','109.60.216.162','1983-05-22 22:42:18'), - (385,'Robert','rreynoldsao@theguardian.com','122.65.209.130','2009-05-01 18:02:51'), - (386,'Donald','dbradleyap@etsy.com','42.54.35.126','1997-01-16 16:31:52'), - (387,'Rachel','rfisheraq@nih.gov','160.243.250.45','2006-02-17 22:05:49'), - (388,'Nicholas','nhamiltonar@princeton.edu','156.211.37.111','1976-06-21 03:36:29'), - (389,'Timothy','twhiteas@ca.gov','36.128.23.70','1975-09-24 03:51:18'), - (390,'Diana','dbradleyat@odnoklassniki.ru','44.102.120.184','1983-04-27 09:02:50'), - (391,'Billy','bfowlerau@jimdo.com','91.200.68.196','1995-01-29 06:57:35'), - (392,'Bruce','bandrewsav@ucoz.com','48.12.101.125','1992-10-27 04:31:39'), - (393,'Linda','lromeroaw@usa.gov','100.71.233.19','1992-06-08 15:13:18'), - (394,'Debra','dwatkinsax@ucoz.ru','52.160.233.193','2001-11-11 06:51:01'), - (395,'Katherine','kburkeay@wix.com','151.156.242.141','2010-06-14 19:54:28'), - (396,'Martha','mharrisonaz@youku.com','21.222.10.199','1989-10-16 14:17:55'), - (397,'Dennis','dwellsb0@youtu.be','103.16.29.3','1985-12-21 06:05:51'), - (398,'Gloria','grichardsb1@bloglines.com','90.147.120.234','1982-08-27 01:04:43'), - (399,'Brenda','bfullerb2@t.co','33.253.63.90','2011-04-20 05:00:35'), - (400,'Larry','lhendersonb3@disqus.com','88.95.132.128','1982-08-31 02:15:12'), - (401,'Richard','rlarsonb4@wisc.edu','13.48.231.150','1979-04-15 14:08:09'), - (402,'Terry','thuntb5@usa.gov','65.91.103.240','1998-05-15 11:50:49'), - (403,'Harry','hburnsb6@nasa.gov','33.38.21.244','1981-04-12 14:02:20'), - (404,'Diana','dellisb7@mlb.com','218.229.81.135','1997-01-29 00:17:25'), - (405,'Jack','jburkeb8@tripadvisor.com','210.227.182.216','1984-03-09 17:24:03'), - (406,'Julia','jlongb9@fotki.com','10.210.12.104','2005-10-26 03:54:13'), - (407,'Lois','lscottba@msu.edu','188.79.136.138','1973-02-02 18:40:39'), - (408,'Sandra','shendersonbb@shareasale.com','114.171.220.108','2012-06-09 18:22:26'), - (409,'Irene','isanchezbc@cdbaby.com','109.255.50.119','1983-09-28 21:11:27'), - (410,'Emily','ebrooksbd@bandcamp.com','227.81.93.79','1970-08-31 21:08:01'), - (411,'Michelle','mdiazbe@businessweek.com','236.249.6.226','1993-05-22 08:07:07'), - (412,'Tammy','tbennettbf@wisc.edu','145.253.239.152','1978-12-31 20:24:51'), - (413,'Christine','cgreenebg@flickr.com','97.25.140.118','1978-07-17 12:55:30'), - (414,'Patricia','pgarzabh@tuttocitta.it','139.246.192.211','1984-02-27 13:40:08'), - (415,'Kimberly','kromerobi@aol.com','73.56.88.247','1976-09-16 14:22:04'), - (416,'George','gjohnstonbj@fda.gov','240.36.245.185','1979-07-24 14:36:02'), - (417,'Eugene','efullerbk@sciencedaily.com','42.38.105.140','2012-09-12 01:56:41'), - (418,'Andrea','astevensbl@goo.gl','31.152.207.204','1979-05-24 11:06:21'), - (419,'Shirley','sreidbm@scientificamerican.com','103.60.31.241','1984-02-23 04:07:41'), - (420,'Terry','tmorenobn@blinklist.com','92.161.34.42','1994-06-25 14:01:35'), - (421,'Christopher','cmorenobo@go.com','158.86.176.82','1973-09-05 09:18:47'), - (422,'Dennis','dhansonbp@ning.com','40.160.81.75','1982-01-20 10:19:41'), - (423,'Beverly','brussellbq@de.vu','138.32.56.204','1997-11-06 07:20:19'), - (424,'Howard','hparkerbr@163.com','103.171.134.171','2015-06-24 15:37:10'), - (425,'Helen','hmccoybs@fema.gov','61.200.4.71','1995-06-20 08:59:10'), - (426,'Ann','ahudsonbt@cafepress.com','239.187.71.125','1977-04-11 07:59:28'), - (427,'Tina','twestbu@nhs.uk','80.213.117.74','1992-08-19 05:54:44'), - (428,'Terry','tnguyenbv@noaa.gov','21.93.118.95','1991-09-19 23:22:55'), - (429,'Ashley','aburtonbw@wix.com','233.176.205.109','2009-11-10 05:01:20'), - (430,'Eric','emyersbx@1und1.de','168.91.212.67','1987-08-10 07:16:20'), - (431,'Barbara','blittleby@lycos.com','242.14.189.239','2008-08-02 12:13:04'), - (432,'Sean','sevansbz@instagram.com','14.39.177.13','2007-04-16 17:28:49'), - (433,'Shirley','sburtonc0@newsvine.com','34.107.138.76','1980-12-10 02:19:29'), - (434,'Patricia','pfreemanc1@so-net.ne.jp','219.213.142.117','1987-03-01 02:25:45'), - (435,'Paula','pfosterc2@vkontakte.ru','227.14.138.141','1972-09-22 12:59:34'), - (436,'Nicole','nstewartc3@1688.com','8.164.23.115','1998-10-27 00:10:17'), - (437,'Earl','ekimc4@ovh.net','100.26.244.177','2013-01-22 10:05:46'), - (438,'Beverly','breedc5@reuters.com','174.12.226.27','1974-09-22 07:29:36'), - (439,'Lawrence','lbutlerc6@a8.net','105.164.42.164','1992-06-05 00:43:40'), - (440,'Charles','cmoorec7@ucoz.com','252.197.131.69','1990-04-09 02:34:05'), - (441,'Alice','alawsonc8@live.com','183.73.220.232','1989-02-28 09:11:04'), - (442,'Dorothy','dcarpenterc9@arstechnica.com','241.47.200.14','2005-05-02 19:57:21'), - (443,'Carolyn','cfowlerca@go.com','213.109.55.202','1978-09-10 20:18:20'), - (444,'Anthony','alongcb@free.fr','169.221.158.204','1984-09-13 01:59:23'), - (445,'Annie','amoorecc@e-recht24.de','50.34.148.61','2009-03-26 03:41:07'), - (446,'Carlos','candrewscd@ihg.com','236.69.59.212','1972-03-29 22:42:48'), - (447,'Beverly','bramosce@google.ca','164.250.184.49','1982-11-10 04:34:01'), - (448,'Teresa','tlongcf@umich.edu','174.88.53.223','1987-05-17 12:48:00'), - (449,'Roy','rboydcg@uol.com.br','91.58.243.215','1974-06-16 17:59:54'), - (450,'Ashley','afieldsch@tamu.edu','130.138.11.126','1983-09-15 05:52:36'), - (451,'Judith','jhawkinsci@cmu.edu','200.187.103.245','2003-10-22 12:24:03'), - (452,'Rebecca','rwestcj@ocn.ne.jp','72.85.3.103','1980-11-13 11:01:26'), - (453,'Raymond','rporterck@infoseek.co.jp','146.33.216.151','1982-05-17 23:58:03'), - (454,'Janet','jmarshallcl@odnoklassniki.ru','52.46.193.166','1998-10-04 00:02:21'), - (455,'Shirley','speterscm@salon.com','248.126.31.15','1987-01-30 06:04:59'), - (456,'Annie','abowmancn@economist.com','222.213.248.59','2006-03-14 23:52:59'), - (457,'Jean','jlarsonco@blogspot.com','71.41.25.195','2007-09-08 23:49:45'), - (458,'Phillip','pmoralescp@stanford.edu','74.119.87.28','2011-03-14 20:25:40'), - (459,'Norma','nrobinsoncq@economist.com','28.225.21.54','1989-10-21 01:22:43'), - (460,'Kimberly','kclarkcr@dion.ne.jp','149.171.132.153','2008-06-27 02:27:30'), - (461,'Ruby','rmorriscs@ucla.edu','177.85.163.249','2016-01-28 16:43:44'), - (462,'Jonathan','jcastilloct@tripod.com','78.4.28.77','2000-05-24 17:33:06'), - (463,'Edward','ebryantcu@jigsy.com','140.31.98.193','1992-12-17 08:32:47'), - (464,'Chris','chamiltoncv@eepurl.com','195.171.234.206','1970-12-05 03:42:19'), - (465,'Michael','mweavercw@reference.com','7.233.133.213','1987-03-29 02:30:54'), - (466,'Howard','hlawrencecx@businessweek.com','113.225.124.224','1990-07-30 07:20:57'), - (467,'Philip','phowardcy@comsenz.com','159.170.247.249','2010-10-15 10:18:37'), - (468,'Mary','mmarshallcz@xing.com','125.132.189.70','2007-07-19 13:48:47'), - (469,'Scott','salvarezd0@theguardian.com','78.49.103.230','1987-10-31 06:10:44'), - (470,'Wayne','wcarrolld1@blog.com','238.1.120.204','1980-11-19 03:26:10'), - (471,'Jennifer','jwoodsd2@multiply.com','92.20.224.49','2010-05-06 22:17:04'), - (472,'Raymond','rwelchd3@toplist.cz','176.158.35.240','2007-12-12 19:02:51'), - (473,'Steven','sdixond4@wisc.edu','167.55.237.52','1984-05-05 11:44:37'), - (474,'Ralph','rjamesd5@ameblo.jp','241.190.50.133','2000-07-06 08:44:37'), - (475,'Jason','jrobinsond6@hexun.com','138.119.139.56','2006-02-03 05:27:45'), - (476,'Doris','dwoodd7@fema.gov','180.220.156.190','1978-05-11 20:14:20'), - (477,'Elizabeth','eberryd8@youtu.be','74.188.53.229','2006-11-18 08:29:06'), - (478,'Irene','igilbertd9@privacy.gov.au','194.152.218.1','1985-09-17 02:46:52'), - (479,'Jessica','jdeanda@ameblo.jp','178.103.93.118','1974-06-07 19:04:05'), - (480,'Rachel','ralvarezdb@phoca.cz','17.22.223.174','1999-03-08 02:43:25'), - (481,'Kenneth','kthompsondc@shinystat.com','229.119.91.234','2007-05-15 13:17:32'), - (482,'Harold','hmurraydd@parallels.com','133.26.188.80','1993-11-15 03:42:07'), - (483,'Paula','phowellde@samsung.com','34.215.28.216','1993-11-29 15:55:00'), - (484,'Ruth','rpiercedf@tripadvisor.com','111.30.130.123','1986-08-17 10:19:38'), - (485,'Phyllis','paustindg@vk.com','50.84.34.178','1994-04-13 03:05:24'), - (486,'Laura','lfosterdh@usnews.com','37.8.101.33','2001-06-30 08:58:59'), - (487,'Eric','etaylordi@com.com','103.183.253.45','2006-09-15 20:18:46'), - (488,'Doris','driveradj@prweb.com','247.16.2.199','1989-05-08 09:27:09'), - (489,'Ryan','rhughesdk@elegantthemes.com','103.234.153.232','1989-08-01 18:36:06'), - (490,'Steve','smoralesdl@jigsy.com','3.76.84.207','2011-03-13 17:01:05'), - (491,'Louis','lsullivandm@who.int','78.135.44.208','1975-11-26 16:01:23'), - (492,'Catherine','ctuckerdn@seattletimes.com','93.137.106.21','1990-03-13 16:14:56'), - (493,'Ann','adixondo@gmpg.org','191.136.222.111','2002-06-05 14:22:18'), - (494,'Johnny','jhartdp@amazon.com','103.252.198.39','1988-07-30 23:54:49'), - (495,'Susan','srichardsdq@skype.com','126.247.192.11','2005-01-09 12:08:14'), - (496,'Brenda','bparkerdr@skype.com','63.232.216.86','1974-05-18 05:58:29'), - (497,'Tammy','tmurphyds@constantcontact.com','56.56.37.112','2014-08-05 18:22:25'), - (498,'Larry','lhayesdt@wordpress.com','162.146.13.46','1997-02-26 14:01:53'), - (499,'Evelyn','ethomasdu@hhs.gov','6.241.88.250','2007-09-14 13:03:34'), - (500,'Paula','pshawdv@networksolutions.com','123.27.47.249','2003-10-30 21:19:20'); - -create table {schema}.seed_config_expected_1 as ( - - select *, 'default'::text as c1, 'default'::text as c2, 'was true'::text as some_bool from {schema}.seed - -); - -create table {schema}.seed_config_expected_2 as ( - - select *, 'abc'::text as c1, 'def'::text as c2, 'was true'::text as some_bool from {schema}.seed - -); - -create table {schema}.seed_config_expected_3 as ( - - select *, 'ghi'::text as c1, 'jkl'::text as c2, 'was true'::text as some_bool from {schema}.seed - -); - -create table {schema}.seed_summary ( - year timestamp without time zone, - count bigint -); - -INSERT INTO {schema}.seed_summary - ("year","count") -VALUES - ('1970-01-01 00:00:00',10), - ('1971-01-01 00:00:00',6), - ('1972-01-01 00:00:00',9), - ('1973-01-01 00:00:00',12), - ('1974-01-01 00:00:00',8), - ('1975-01-01 00:00:00',5), - ('1976-01-01 00:00:00',11), - ('1977-01-01 00:00:00',13), - ('1978-01-01 00:00:00',11), - ('1979-01-01 00:00:00',13), - ('1980-01-01 00:00:00',9), - ('1981-01-01 00:00:00',3), - ('1982-01-01 00:00:00',9), - ('1983-01-01 00:00:00',15), - ('1984-01-01 00:00:00',13), - ('1985-01-01 00:00:00',11), - ('1986-01-01 00:00:00',5), - ('1987-01-01 00:00:00',14), - ('1988-01-01 00:00:00',9), - ('1989-01-01 00:00:00',10), - ('1990-01-01 00:00:00',12), - ('1991-01-01 00:00:00',16), - ('1992-01-01 00:00:00',15), - ('1993-01-01 00:00:00',11), - ('1994-01-01 00:00:00',10), - ('1995-01-01 00:00:00',10), - ('1996-01-01 00:00:00',6), - ('1997-01-01 00:00:00',11), - ('1998-01-01 00:00:00',12), - ('1999-01-01 00:00:00',9), - ('2000-01-01 00:00:00',13), - ('2001-01-01 00:00:00',14), - ('2002-01-01 00:00:00',9), - ('2003-01-01 00:00:00',8), - ('2004-01-01 00:00:00',9), - ('2005-01-01 00:00:00',14), - ('2006-01-01 00:00:00',9), - ('2007-01-01 00:00:00',16), - ('2008-01-01 00:00:00',6), - ('2009-01-01 00:00:00',15), - ('2010-01-01 00:00:00',13), - ('2011-01-01 00:00:00',23), - ('2012-01-01 00:00:00',9), - ('2013-01-01 00:00:00',10), - ('2014-01-01 00:00:00',9), - ('2015-01-01 00:00:00',10), - ('2016-01-01 00:00:00',5); diff --git a/tests/functional/dependencies/data/update.sql b/tests/functional/dependencies/data/update.sql deleted file mode 100644 index a3845ee41..000000000 --- a/tests/functional/dependencies/data/update.sql +++ /dev/null @@ -1,7 +0,0 @@ - -UPDATE {schema}.seed set first_name = 'Paul', updated_at = now() where id = 500; - -INSERT INTO {schema}.seed - ("id","first_name","email","ip_address","updated_at") -VALUES - (501, 'Steve', 'sthomas@hhs.gov', '6.241.88.251', now()); diff --git a/tests/functional/dependencies/duplicate_dependency/dbt_project.yml b/tests/functional/dependencies/duplicate_dependency/dbt_project.yml deleted file mode 100644 index dbda758fc..000000000 --- a/tests/functional/dependencies/duplicate_dependency/dbt_project.yml +++ /dev/null @@ -1,5 +0,0 @@ -name: 'test' -version: '1.0' -config-version: 2 - -profile: 'default' diff --git a/tests/functional/dependencies/early_hook_dependency/dbt_project.yml b/tests/functional/dependencies/early_hook_dependency/dbt_project.yml deleted file mode 100644 index 7cde1ad41..000000000 --- a/tests/functional/dependencies/early_hook_dependency/dbt_project.yml +++ /dev/null @@ -1,6 +0,0 @@ -name: early_hooks -version: '1.0' -config-version: 2 -on-run-start: - - create table {{ var('test_create_table') }} as (select 1 as id) - - create table {{ var('test_create_second_table') }} as (select 3 as id) diff --git a/tests/functional/dependencies/late_hook_dependency/dbt_project.yml b/tests/functional/dependencies/late_hook_dependency/dbt_project.yml deleted file mode 100644 index 62750f8d3..000000000 --- a/tests/functional/dependencies/late_hook_dependency/dbt_project.yml +++ /dev/null @@ -1,6 +0,0 @@ -name: late_hooks -version: '1.0' -config-version: 2 -on-run-start: - - insert into {{ var('test_create_table') }} values (2) - - insert into {{ var('test_create_second_table') }} values (4) diff --git a/tests/functional/dependencies/local_dependency/dbt_project.yml b/tests/functional/dependencies/local_dependency/dbt_project.yml deleted file mode 100644 index d56280a55..000000000 --- a/tests/functional/dependencies/local_dependency/dbt_project.yml +++ /dev/null @@ -1,23 +0,0 @@ - -name: 'local_dep' -version: '1.0' -config-version: 2 - -profile: 'default' - -model-paths: ["models"] -analysis-paths: ["analyses"] -test-paths: ["tests"] -seed-paths: ["seeds"] -macro-paths: ["macros"] - -require-dbt-version: '>=0.1.0' - -target-path: "target" # directory which will store compiled SQL files -clean-targets: # directories to be removed by `dbt clean` - - "target" - - "dbt_packages" - - -seeds: - quote_columns: False diff --git a/tests/functional/dependencies/local_dependency/macros/dep_macro.sql b/tests/functional/dependencies/local_dependency/macros/dep_macro.sql deleted file mode 100644 index 81e9a0fae..000000000 --- a/tests/functional/dependencies/local_dependency/macros/dep_macro.sql +++ /dev/null @@ -1,3 +0,0 @@ -{% macro some_overridden_macro() -%} -100 -{%- endmacro %} diff --git a/tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql b/tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql deleted file mode 100644 index 1e8d62a6b..000000000 --- a/tests/functional/dependencies/local_dependency/macros/generate_schema_name.sql +++ /dev/null @@ -1,15 +0,0 @@ -{# This should not be ignored, even as it's in a subpackage #} -{% macro generate_schema_name(custom_schema_name=none, node=none) -%} - {{ var('schema_override', target.schema) }} -{%- endmacro %} - -{# This should not be ignored, even as it's in a subpackage #} -{% macro generate_database_name(custom_database_name=none, node=none) -%} - {{ 'dbt' }} -{%- endmacro %} - - -{# This should not be ignored, even as it's in a subpackage #} -{% macro generate_alias_name(custom_alias_name=none, node=none) -%} - {{ node.name ~ '_subpackage_generate_alias_name' }} -{%- endmacro %} diff --git a/tests/functional/dependencies/local_dependency/models/model_to_import.sql b/tests/functional/dependencies/local_dependency/models/model_to_import.sql deleted file mode 100644 index 4b91aa0f2..000000000 --- a/tests/functional/dependencies/local_dependency/models/model_to_import.sql +++ /dev/null @@ -1 +0,0 @@ -select * from {{ ref('seed') }} diff --git a/tests/functional/dependencies/local_dependency/models/schema.yml b/tests/functional/dependencies/local_dependency/models/schema.yml deleted file mode 100644 index 4b3278eda..000000000 --- a/tests/functional/dependencies/local_dependency/models/schema.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: 2 -sources: - - name: my_source - schema: invalid_schema - tables: - - name: my_table - - name: seed_source - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: "seed" - identifier: "seed_subpackage_generate_alias_name" diff --git a/tests/functional/dependencies/local_dependency/seeds/seed.csv b/tests/functional/dependencies/local_dependency/seeds/seed.csv deleted file mode 100644 index 3ff3deb87..000000000 --- a/tests/functional/dependencies/local_dependency/seeds/seed.csv +++ /dev/null @@ -1,2 +0,0 @@ -id -1 diff --git a/tests/functional/dependencies/models_local/dep_source_model.sql b/tests/functional/dependencies/models_local/dep_source_model.sql deleted file mode 100644 index e7e5fcfd5..000000000 --- a/tests/functional/dependencies/models_local/dep_source_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{# If our dependency source didn't exist, this would be an errror #} -select * from {{ source('seed_source', 'seed') }} diff --git a/tests/functional/dependencies/models_local/my_configured_model.sql b/tests/functional/dependencies/models_local/my_configured_model.sql deleted file mode 100644 index 554ed3b44..000000000 --- a/tests/functional/dependencies/models_local/my_configured_model.sql +++ /dev/null @@ -1,4 +0,0 @@ -{{ - config(schema='configured') -}} -select * from {{ ref('model_to_import') }} diff --git a/tests/functional/dependencies/models_local/my_model.sql b/tests/functional/dependencies/models_local/my_model.sql deleted file mode 100644 index a84f75e1e..000000000 --- a/tests/functional/dependencies/models_local/my_model.sql +++ /dev/null @@ -1,2 +0,0 @@ - -select * from {{ ref('model_to_import') }} diff --git a/tests/functional/dependencies/models_local/schema.yml b/tests/functional/dependencies/models_local/schema.yml deleted file mode 100644 index af65187f4..000000000 --- a/tests/functional/dependencies/models_local/schema.yml +++ /dev/null @@ -1,7 +0,0 @@ -version: 2 -sources: - - name: my_source - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: my_table - identifier: seed diff --git a/tests/functional/dependencies/models_local/source_override_model.sql b/tests/functional/dependencies/models_local/source_override_model.sql deleted file mode 100644 index d567d2083..000000000 --- a/tests/functional/dependencies/models_local/source_override_model.sql +++ /dev/null @@ -1,2 +0,0 @@ -{# If our source override didn't take, this would be an errror #} -select * from {{ source('my_source', 'my_table') }} diff --git a/tests/functional/dependencies/test_dependency_options.py b/tests/functional/dependencies/test_dependency_options.py deleted file mode 100644 index 08ffc5d2f..000000000 --- a/tests/functional/dependencies/test_dependency_options.py +++ /dev/null @@ -1,106 +0,0 @@ -import os -import shutil - -from dbt.tests.util import run_dbt -import pytest - - -class TestDepsOptions(object): - # this revision of dbt-integration-project requires dbt-utils.git@0.5.0, which the - # package config handling should detect - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "package": "fivetran/fivetran_utils", - "version": "0.4.7", - }, - ] - } - - @pytest.fixture - def clean_start(self, project): - if os.path.exists("dbt_packages"): - shutil.rmtree("dbt_packages") - if os.path.exists("package-lock.yml"): - os.remove("package-lock.yml") - - def test_deps_lock(self, clean_start): - run_dbt(["deps", "--lock"]) - assert not os.path.exists("dbt_packages") - assert os.path.exists("package-lock.yml") - with open("package-lock.yml") as fp: - contents = fp.read() - assert ( - contents - == """packages: -- package: fivetran/fivetran_utils - version: 0.4.7 -- package: dbt-labs/dbt_utils - version: 1.1.1 -sha1_hash: 71304bca2138cf8004070b3573a1e17183c0c1a8 -""" - ) - - def test_deps_default(self, clean_start): - run_dbt(["deps"]) - assert len(os.listdir("dbt_packages")) == 2 - assert os.path.exists("package-lock.yml") - with open("package-lock.yml") as fp: - contents = fp.read() - assert ( - contents - == """packages: -- package: fivetran/fivetran_utils - version: 0.4.7 -- package: dbt-labs/dbt_utils - version: 1.1.1 -sha1_hash: 71304bca2138cf8004070b3573a1e17183c0c1a8 -""" - ) - - def test_deps_add(self, clean_start): - run_dbt(["deps", "--add-package", "dbt-labs/audit_helper@0.9.0"]) - with open("packages.yml") as fp: - contents = fp.read() - assert ( - contents - == """packages: - - package: fivetran/fivetran_utils - version: 0.4.7 - - package: dbt-labs/audit_helper - version: 0.9.0 -""" - ) - assert len(os.listdir("dbt_packages")) == 3 - - def test_deps_add_without_install(self, clean_start): - os.rename("packages.yml", "dependencies.yml") - run_dbt( - [ - "deps", - "--add-package", - "dbt-labs/audit_helper@0.9.0", - "--lock", - ] - ) - assert not os.path.exists("dbt_packages") - assert not os.path.exists("packages.yml") - with open("dependencies.yml") as fp: - contents = fp.read() - assert ( - contents - == """packages: - - package: fivetran/fivetran_utils - version: 0.4.7 - - package: dbt-labs/audit_helper - version: 0.9.0 -""" - ) - - def test_deps_upgrade(self, clean_start, mocker): - run_dbt(["deps", "--lock"]) - patched_lock = mocker.patch("dbt.task.deps.DepsTask.lock") - run_dbt(["deps", "--upgrade"]) - assert patched_lock.call_count == 1 diff --git a/tests/functional/dependencies/test_local_dependency.py b/tests/functional/dependencies/test_local_dependency.py deleted file mode 100644 index a4a42d1b7..000000000 --- a/tests/functional/dependencies/test_local_dependency.py +++ /dev/null @@ -1,356 +0,0 @@ -import json -import os -from pathlib import Path -import shutil -from unittest import mock - -from dbt.exceptions import DbtProjectError, DependencyError -from dbt.tests.util import check_relations_equal -from dbt_common.exceptions import CompilationError, DbtRuntimeError -import dbt_common.semver as semver -import pytest -import yaml - -from tests.functional.utils import ( - run_dbt, - run_dbt_and_capture, - up_one, -) - - -models__dep_source = """ -{# If our dependency source didn't exist, this would be an errror #} -select * from {{ source('seed_source', 'seed') }} -""" - -models__my_configured_model = """ -{{ - config(schema='configured') -}} -select * from {{ ref('model_to_import') }} -""" - -models__my_model = """ -select * from {{ ref('model_to_import') }} -""" - -models__source_override_model = """ -{# If our source override didn't take, this would be an errror #} -select * from {{ source('my_source', 'my_table') }} -""" - -models__iterate = """ -{% for x in no_such_dependency.no_such_method() %} -{% endfor %} -""" - -models__hooks_actual = """ -select * from {{ var('test_create_table') }} -union all -select * from {{ var('test_create_second_table') }} -""" - -models__hooks_expected = """ -{# surely there is a better way to do this! #} - -{% for _ in range(1, 5) %} -select {{ loop.index }} as id -{% if not loop.last %}union all{% endif %} -{% endfor %} -""" - -properties__schema_yml = """ -version: 2 -sources: - - name: my_source - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: my_table - identifier: seed_subpackage_generate_alias_name -""" - -macros__macro_sql = """ -{# This macro also exists in the dependency -dbt should be fine with that #} -{% macro some_overridden_macro() -%} -999 -{%- endmacro %} -""" - -macros__macro_override_schema_sql = """ -{% macro generate_schema_name(schema_name, node) -%} - - {{ schema_name }}_{{ node.schema }}_macro - -{%- endmacro %} -""" - - -class BaseDependencyTest(object): - @pytest.fixture(scope="class") - def macros(self): - return {"macro.sql": macros__macro_sql} - - @pytest.fixture(scope="class") - def models(self): - return { - "dep_source_model.sql": models__dep_source, - "my_configured_model.sql": models__my_configured_model, - "my_model.sql": models__my_model, - "source_override_model.sql": models__source_override_model, - } - - @pytest.fixture(scope="class") - def properties(self): - return { - "schema.yml": properties__schema_yml, - } - - @pytest.fixture(scope="class", autouse=True) - def modify_schema_fqn(self, project): - schema_fqn = "{}.{}".format( - project.database, - project.test_schema, - ) - schema_fqn_configured = "{}.{}".format( - project.database, - project.test_schema + "_configured", - ) - - project.created_schemas.append(schema_fqn) - project.created_schemas.append(schema_fqn_configured) - - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project, modify_schema_fqn): - shutil.copytree( - project.test_dir / Path("local_dependency"), - project.project_root / Path("local_dependency"), - ) - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - -class TestSimpleDependency(BaseDependencyTest): - def test_local_dependency(self, project): - run_dbt(["deps"]) - run_dbt(["seed"]) - results = run_dbt() - assert len(results) == 5 - - assert {r.node.schema for r in results} == { - project.test_schema, - project.test_schema + "_configured", - } - - base_schema_nodes = [r.node for r in results if r.node.schema == project.test_schema] - assert len(base_schema_nodes) == 4 - - check_relations_equal( - project.adapter, - [ - f"{project.test_schema}.source_override_model", - f"{project.test_schema}.seed_subpackage_generate_alias_name", - ], - ) - check_relations_equal( - project.adapter, - [ - f"{project.test_schema}.dep_source_model", - f"{project.test_schema}.seed_subpackage_generate_alias_name", - ], - ) - - def test_no_dependency_paths(self, project): - run_dbt(["deps"]) - run_dbt(["seed"]) - - # prove dependency does not exist as model in project - dep_path = os.path.join("models_local", "model_to_import.sql") - results = run_dbt( - ["run", "--models", f"+{dep_path}"], - ) - assert len(results) == 0 - - # prove model can run when importing that dependency - local_path = Path("models") / "my_model.sql" - results = run_dbt( - ["run", "--models", f"+{local_path}"], - ) - assert len(results) == 2 - - -class TestSimpleDependencyRelativePath(BaseDependencyTest): - def test_local_dependency_relative_path(self, project): - last_dir = Path(project.project_root).name - with up_one(): - _, stdout = run_dbt_and_capture(["deps", "--project-dir", last_dir]) - assert ( - "Installed from <local @ local_dependency>" in stdout - ), "Test output didn't contain expected string" - - -class TestMissingDependency(object): - @pytest.fixture(scope="class") - def models(self): - return { - "iterate.sql": models__iterate, - } - - def test_missing_dependency(self, project): - # dbt should raise a runtime exception - with pytest.raises(DbtRuntimeError): - run_dbt(["compile"]) - - -class TestSimpleDependencyWithSchema(BaseDependencyTest): - def dbt_vargs(self, schema): - # we can't add this to the config because Sources don't respect dbt_project.yml - vars_arg = yaml.safe_dump({"schema_override": "dbt_test_{}_macro".format(schema)}) - return ["--vars", vars_arg] - - def project_config(self): - return { - "models": { - "schema": "dbt_test", - }, - "seeds": { - "schema": "dbt_test", - }, - } - - @mock.patch("dbt.config.project.get_installed_version") - def test_local_dependency_out_of_date(self, mock_get, project): - mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") - run_dbt(["deps"] + self.dbt_vargs(project.test_schema)) - # check seed - with pytest.raises(DbtProjectError) as exc: - run_dbt(["seed"] + self.dbt_vargs(project.test_schema)) - assert "--no-version-check" in str(exc.value) - # check run too - with pytest.raises(DbtProjectError) as exc: - run_dbt(["run"] + self.dbt_vargs(project.test_schema)) - assert "--no-version-check" in str(exc.value) - - @mock.patch("dbt.config.project.get_installed_version") - def test_local_dependency_out_of_date_no_check(self, mock_get): - mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") - run_dbt(["deps"]) - run_dbt(["seed", "--no-version-check"]) - results = run_dbt(["run", "--no-version-check"]) - assert len(results) == 5 - - -class TestSimpleDependencyNoVersionCheckConfig(BaseDependencyTest): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "flags": { - "send_anonymous_usage_stats": False, - "version_check": False, - }, - "models": { - "schema": "dbt_test", - }, - "seeds": { - "schema": "dbt_test", - }, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"macro.sql": macros__macro_override_schema_sql} - - @mock.patch("dbt.config.project.get_installed_version") - def test_local_dependency_out_of_date_no_check(self, mock_get, project): - # we can't add this to the config because Sources don't respect dbt_project.yml - base_schema = "dbt_test_{}_macro".format(project.test_schema) - vars_arg = yaml.safe_dump( - { - "schema_override": base_schema, - } - ) - - mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") - run_dbt(["deps", "--vars", vars_arg]) - run_dbt(["seed", "--vars", vars_arg]) - results = run_dbt(["run", "--vars", vars_arg]) - len(results) == 5 - - -class TestSimpleDependencyHooks(BaseDependencyTest): - @pytest.fixture(scope="class") - def models(self): - return { - "actual.sql": models__hooks_actual, - "expected.sql": models__hooks_expected, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - # these hooks should run first, so nothing to drop - return { - "on-run-start": [ - "drop table if exists {{ var('test_create_table') }}", - "drop table if exists {{ var('test_create_second_table') }}", - ] - } - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [{"local": "early_hook_dependency"}, {"local": "late_hook_dependency"}] - } - - @pytest.fixture(scope="class") - def prepare_dependencies(self, project): - shutil.copytree( - project.test_dir / Path("early_hook_dependency"), - project.project_root / Path("early_hook_dependency"), - ) - shutil.copytree( - project.test_dir / Path("late_hook_dependency"), - project.project_root / Path("late_hook_dependency"), - ) - - def test_hook_dependency(self, prepare_dependencies, project): - cli_vars = json.dumps( - { - "test_create_table": '"{}"."hook_test"'.format(project.test_schema), - "test_create_second_table": '"{}"."hook_test_2"'.format(project.test_schema), - } - ) - - run_dbt(["deps", "--vars", cli_vars]) - results = run_dbt(["run", "--vars", cli_vars]) - assert len(results) == 2 - check_relations_equal(project.adapter, ["actual", "expected"]) - - -class TestSimpleDependencyDuplicateName(BaseDependencyTest): - @pytest.fixture(scope="class", autouse=True) - def setUp(self): - pass # do not copy local dependency automatically - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "duplicate_dependency"}]} - - @pytest.fixture(scope="class") - def prepare_dependencies(self, project): - shutil.copytree( - project.test_dir / Path("duplicate_dependency"), - project.project_root / Path("duplicate_dependency"), - ) - - def test_local_dependency_same_name(self, prepare_dependencies, project): - with pytest.raises(DependencyError): - run_dbt(["deps"], expect_pass=False) - - def test_local_dependency_same_name_sneaky(self, prepare_dependencies, project): - shutil.copytree("duplicate_dependency", "./dbt_packages/duplicate_dependency") - with pytest.raises(CompilationError): - run_dbt(["compile"]) - - # needed to avoid compilation errors from duplicate package names in test autocleanup - run_dbt(["clean"]) diff --git a/tests/functional/dependencies/test_simple_dependency.py b/tests/functional/dependencies/test_simple_dependency.py deleted file mode 100644 index f35d902d6..000000000 --- a/tests/functional/dependencies/test_simple_dependency.py +++ /dev/null @@ -1,435 +0,0 @@ -import os -from pathlib import Path -import tempfile - -from dbt.exceptions import DbtProjectError -from dbt.tests.util import ( - check_relations_equal, - run_dbt, - write_config_file, -) -import pytest - - -models__disabled_one = """ -{{config(enabled=False)}} - -select 1 -""" - -models__disabled_two = """ -{{config(enabled=False)}} - -select * from {{ref('disabled_one')}} -""" - -models__empty = """ -""" - -models__view_summary = """ -{{ - config( - materialized='view' - ) -}} - - -with t as ( - - select * from {{ ref('view_model') }} - -) - -select date_trunc('year', updated_at) as year, - count(*) -from t -group by 1 -""" - - -class SimpleDependencyBase(object): - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(project.test_data_dir / Path("seed.sql")) - - @pytest.fixture(scope="class") - def models(self): - return { - "empty.sql": models__empty, - "view_summary.sql": models__view_summary, - "view_summary.sql": models__view_summary, - } - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "revision": "1.1", - } - ] - } - - # These two functions included to enable override in ...NoProfile derived test class - @pytest.fixture(scope="class") - def run_deps(self, project): - return run_dbt(["deps"]) - - @pytest.fixture(scope="function") - def run_clean(self, project): - yield - - # clear test schema - assert os.path.exists("target") - run_dbt(["clean"]) - assert not os.path.exists("target") - - -class TestSimpleDependency(SimpleDependencyBase): - def test_simple_dependency(self, run_deps, project, run_clean): - """dependencies should draw from a changing base table""" - results = run_dbt() - assert len(results) == 4 - - check_relations_equal(project.adapter, ["seed", "table_model"]) - check_relations_equal(project.adapter, ["seed", "view_model"]) - check_relations_equal(project.adapter, ["seed", "incremental"]) - check_relations_equal(project.adapter, ["seed_summary", "view_summary"]) - - project.run_sql_file(project.test_data_dir / Path("update.sql")) - results = run_dbt() - assert len(results) == 4 - - check_relations_equal(project.adapter, ["seed", "table_model"]) - check_relations_equal(project.adapter, ["seed", "view_model"]) - check_relations_equal(project.adapter, ["seed", "incremental"]) - - -class TestSimpleDependencyWithDependenciesFile(SimpleDependencyBase): - @pytest.fixture(scope="class") - def packages(self): - return {} - - @pytest.fixture(scope="class") - def dependencies(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "warn-unpinned": True, - } - ] - } - - def test_dependency_with_dependencies_file(self, run_deps, project): - # Tests that "packages" defined in a dependencies.yml file works - run_dbt(["deps"]) - results = run_dbt() - assert len(results) == 4 - - -class TestSimpleDependencyWithEmptyPackagesFile(SimpleDependencyBase): - @pytest.fixture(scope="class") - def packages(self): - return " " - - def test_dependency_with_empty_packages_file(self, run_deps, project): - # Tests that an empty packages file doesn't fail with a Python error - run_dbt(["deps"]) - - -class TestSimpleDependencyNoProfile(SimpleDependencyBase): - """dbt deps and clean commands should not require a profile.""" - - @pytest.fixture(scope="class") - def run_deps(self, project): - with tempfile.TemporaryDirectory() as tmpdir: - result = run_dbt(["deps", "--profiles-dir", tmpdir]) - return result - - @pytest.fixture(scope="class") - def run_clean(self, project): - with tempfile.TemporaryDirectory() as tmpdir: - result = run_dbt(["clean", "--profiles-dir", tmpdir]) - return result - - def test_simple_dependency_no_profile(self, project, run_deps, run_clean): - """only need fixtures as opposed to any model assertions since those are - irrelevant and won't occur within the same runtime as a dbt run -s ...""" - pass - - -class TestSimpleDependencyWithModels(SimpleDependencyBase): - def test_simple_dependency_with_models(self, run_deps, project, run_clean): - results = run_dbt(["run", "--models", "view_model+"]) - len(results) == 2 - - check_relations_equal(project.adapter, ["seed", "view_model"]) - check_relations_equal(project.adapter, ["seed_summary", "view_summary"]) - - created_models = project.get_tables_in_schema() - - assert "table_model" not in created_models - assert "incremental" not in created_models - assert created_models["view_model"] == "view" - assert created_models["view_summary"] == "view" - - -class TestSimpleDependencyUnpinned(object): - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(project.test_data_dir / Path("seed.sql")) - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "warn-unpinned": True, - } - ] - } - - def test_simple_dependency(self, project): - run_dbt(["deps"]) - - -class TestSimpleDependencyWithDuplicates(object): - # dbt should convert these into a single dependency internally - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "revision": "dbt/1.0.0", - }, - { - "git": "https://github.com/dbt-labs/dbt-integration-project.git", - "revision": "dbt/1.0.0", - }, - ] - } - - def test_simple_dependency_deps(self, project): - run_dbt(["deps"]) - - -class TestSimpleDependencyWithSubdirs(object): - # dbt should convert these into a single dependency internally - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-multipe-packages.git", - "subdirectory": "dbt-utils-main", - "revision": "v0.1.0", - }, - { - "git": "https://github.com/dbt-labs/dbt-multipe-packages.git", - "subdirectory": "dbt-date-main", - "revision": "v0.1.0", - }, - ] - } - - def test_git_with_multiple_subdir(self, project): - run_dbt(["deps"]) - assert os.path.exists("package-lock.yml") - expected = """packages: -- git: https://github.com/dbt-labs/dbt-multipe-packages.git - revision: 53782f3ede8fdf307ee1d8e418aa65733a4b72fa - subdirectory: dbt-utils-main -- git: https://github.com/dbt-labs/dbt-multipe-packages.git - revision: 53782f3ede8fdf307ee1d8e418aa65733a4b72fa - subdirectory: dbt-date-main -sha1_hash: b9c8042f29446c55a33f9f211737f445a640c7a1 -""" - with open("package-lock.yml") as fp: - contents = fp.read() - assert contents == expected - assert len(os.listdir("dbt_packages")) == 2 - - -class TestRekeyedDependencyWithSubduplicates(object): - # this revision of dbt-integration-project requires dbt-utils.git@0.5.0, which the - # package config handling should detect - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "revision": "config-1.0.0-deps", - }, - { - "git": "https://github.com/dbt-labs/dbt-utils", - "revision": "0.5.0", - }, - ] - } - - def test_simple_dependency_deps(self, project): - run_dbt(["deps"]) - assert len(os.listdir("dbt_packages")) == 2 - - -class TestTarballNestedDependencies(object): - # this version of dbt_expectations has a dependency on dbt_date, which the - # package config handling should detect - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "tarball": "https://github.com/calogica/dbt-expectations/archive/refs/tags/0.9.0.tar.gz", - "name": "dbt_expectations", - }, - ] - } - - def test_simple_dependency_deps(self, project): - run_dbt(["deps"]) - assert set(os.listdir("dbt_packages")) == set(["dbt_expectations", "dbt_date"]) - - -class DependencyBranchBase(object): - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(project.test_data_dir / Path("seed.sql")) - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "revision": "dbt/1.0.0", - }, - ] - } - - def deps_run_assert_equality(self, project): - run_dbt(["deps"]) - results = run_dbt() - assert len(results) == 4 - - check_relations_equal(project.adapter, ["seed", "table_model"]) - check_relations_equal(project.adapter, ["seed", "view_model"]) - check_relations_equal(project.adapter, ["seed", "incremental"]) - - created_models = project.get_tables_in_schema() - - assert created_models["table_model"] == "table" - assert created_models["view_model"] == "view" - assert created_models["view_summary"] == "view" - assert created_models["incremental"] == "table" - - -class TestSimpleDependencyBranch(DependencyBranchBase): - @pytest.fixture(scope="class") - def models(self): - return { - "view_summary.sql": models__view_summary, - } - - def test_simple_dependency(self, project): - self.deps_run_assert_equality(project) - check_relations_equal(project.adapter, ["seed_summary", "view_summary"]) - - project.run_sql_file(project.test_data_dir / Path("update.sql")) - self.deps_run_assert_equality(project) - - -class TestSimpleDependencyBranchWithEmpty(DependencyBranchBase): - @pytest.fixture(scope="class") - def models(self): - """extra models included""" - return { - "disabled_one.sql": models__disabled_one, - "disabled_two.sql": models__disabled_two, - "view_summary.sql": models__view_summary, - "empty.sql": models__empty, - } - - def test_empty_models_not_compiled_in_dependencies(self, project): - self.deps_run_assert_equality(project) - - models = project.get_tables_in_schema() - - assert "empty" not in models.keys() - - -class TestSimpleDependencyBadProfile(object): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "models": { - "+any_config": "{{ target.name }}", - "+enabled": "{{ target.name in ['redshift', 'postgres'] | as_bool }}", - }, - } - - # Write out the profile data as a yaml file - @pytest.fixture(scope="class", autouse=True) - def dbt_profile_target(self): - # Need to set the environment variable here initially because - # the unittest setup does a load_config. - os.environ["PROFILE_TEST_HOST"] = "localhost" - return { - "type": "postgres", - "threads": 4, - "host": "{{ env_var('PROFILE_TEST_HOST') }}", - "port": 5432, - "user": "root", - "pass": "password", - "dbname": "dbt", - } - - def test_deps_bad_profile(self, project): - del os.environ["PROFILE_TEST_HOST"] - run_dbt(["deps"]) - run_dbt(["clean"]) - - -class TestSimpleDependcyTarball(object): - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "tarball": "https://codeload.github.com/dbt-labs/dbt-utils/tar.gz/0.9.6", - "name": "dbt_utils", - } - ] - } - - def test_deps_simple_tarball_doesnt_error_out(self, project): - run_dbt(["deps"]) - assert len(os.listdir("dbt_packages")) == 1 - - -class TestBadTarballDependency(object): - def test_malformed_tarball_package_causes_exception(self, project): - # We have to specify the bad formatted package here because if we do it - # in a `packages` fixture, the test will blow up in the setup phase, meaning - # we can't appropriately catch it with a `pytest.raises` - bad_tarball_package_spec = { - "packages": [ - { - "tarball": "https://codeload.github.com/dbt-labs/dbt-utils/tar.gz/0.9.6", - "version": "dbt_utils", - } - ] - } - write_config_file(bad_tarball_package_spec, "packages.yml") - - with pytest.raises( - DbtProjectError, match=r"The packages.yml file in this project is malformed" - ) as e: - run_dbt(["deps"]) - assert e is not None diff --git a/tests/functional/dependencies/test_simple_dependency_with_configs.py b/tests/functional/dependencies/test_simple_dependency_with_configs.py deleted file mode 100644 index 55ecff9ad..000000000 --- a/tests/functional/dependencies/test_simple_dependency_with_configs.py +++ /dev/null @@ -1,106 +0,0 @@ -from pathlib import Path - -from dbt.tests.util import check_relations_equal, run_dbt -import pytest - - -models__view_summary = """ -{{ - config( - materialized='view' - ) -}} - - -with t as ( - - select * from {{ ref('view_model') }} - -) - -select date_trunc('year', updated_at) as year, - count(*) -from t -group by 1 -""" - - -class BaseTestSimpleDependencyWithConfigs(object): - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(project.test_data_dir / Path("seed.sql")) - - @pytest.fixture(scope="class") - def models(self): - return { - "view_summary.sql": models__view_summary, - } - - -class TestSimpleDependencyWithConfigs(BaseTestSimpleDependencyWithConfigs): - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "revision": "with-configs-1.0.0", - }, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "vars": { - "dbt_integration_project": {"bool_config": True}, - }, - } - - def test_simple_dependency(self, project): - run_dbt(["deps"]) - results = run_dbt() - assert len(results) == 5 - - check_relations_equal(project.adapter, ["seed_config_expected_1", "config"]) - check_relations_equal(project.adapter, ["seed", "table_model"]) - check_relations_equal(project.adapter, ["seed", "view_model"]) - check_relations_equal(project.adapter, ["seed", "incremental"]) - - -class TestSimpleDependencyWithOverriddenConfigs(BaseTestSimpleDependencyWithConfigs): - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://github.com/dbt-labs/dbt-integration-project", - "revision": "with-configs-1.0.0", - }, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "vars": { - # project-level configs - "dbt_integration_project": { - "config_1": "abc", - "config_2": "def", - "bool_config": True, - }, - }, - } - - def test_simple_dependency(self, project): - run_dbt(["deps"]) - results = run_dbt(["run"]) - len(results) == 5 - - check_relations_equal(project.adapter, ["seed_config_expected_2", "config"]) - check_relations_equal(project.adapter, ["seed", "table_model"]) - check_relations_equal(project.adapter, ["seed", "view_model"]) - check_relations_equal(project.adapter, ["seed", "incremental"]) diff --git a/tests/functional/metrics/fixtures.py b/tests/functional/metrics/fixtures.py deleted file mode 100644 index 5a8373fbe..000000000 --- a/tests/functional/metrics/fixtures.py +++ /dev/null @@ -1,666 +0,0 @@ -# not strictly necessary, but this reflects the integration tests currently in the 'dbt-metrics' package right now -# i'm including just the first 10 rows for more concise 'git diff' - -mock_purchase_data_csv = """purchased_at,payment_type,payment_total -2021-02-14 17:52:36,maestro,2418.94 -2021-02-15 04:16:50,jcb,3043.28 -2021-02-15 11:30:45,solo,1505.81 -2021-02-16 13:08:18,,1532.85 -2021-02-17 05:41:34,americanexpress,319.91 -2021-02-18 06:47:32,jcb,2143.44 -2021-02-19 01:37:09,jcb,840.1 -2021-02-19 03:38:49,jcb,1388.18 -2021-02-19 04:22:41,jcb,2834.96 -2021-02-19 13:28:50,china-unionpay,2440.98 -""".strip() - -models_people_sql = """ -select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at -union all -select 2 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at -union all -select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at -""" - -semantic_model_people_yml = """ -version: 2 - -semantic_models: - - name: semantic_people - model: ref('people') - dimensions: - - name: favorite_color - type: categorical - - name: created_at - type: TIME - type_params: - time_granularity: day - measures: - - name: years_tenure - agg: SUM - expr: tenure - - name: people - agg: count - expr: id - entities: - - name: id - type: primary - defaults: - agg_time_dimension: created_at -""" - -basic_metrics_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - - - name: collective_tenure - label: "Collective tenure" - description: Total number of years of team experience - type: simple - type_params: - measure: - name: "years_tenure" - filter: "{{ Dimension('id__loves_dbt') }} is true" - - - name: average_tenure - label: "Average tenure" - description: "The average tenure per person" - type: ratio - type_params: - numerator: collective_tenure - denominator: number_of_people - - - name: average_tenure_plus_one - label: "Average tenure, plus 1" - description: "The average tenure per person" - type: derived - type_params: - metrics: - - average_tenure - expr: "average_tenure + 1" -""" - -metricflow_time_spine_sql = """ -SELECT to_date('02/20/2023, 'mm/dd/yyyy') as date_day -""" - -models_people_metrics_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - - - name: collective_tenure - label: "Collective tenure" - description: Total number of years of team experience - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - join_to_timespine: true - fill_nulls_with: 0 - - - name: collective_window - label: "Collective window" - description: Testing window - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - window: 14 days - - - name: average_tenure - label: Average Tenure - description: The average tenure of our people - type: ratio - type_params: - numerator: collective_tenure - denominator: number_of_people - - - name: average_tenure_minus_people - label: Average Tenure minus People - description: Well this isn't really useful is it? - type: derived - type_params: - expr: average_tenure - number_of_people - metrics: - - average_tenure - - number_of_people - -""" - -invalid_models_people_metrics_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - model: "ref(people)" - calculation_method: count - expression: "*" - timestamp: created_at - time_grains: [day, week, month] - dimensions: - - favorite_color - - loves_dbt - meta: - my_meta: 'testing' - - - name: collective_tenure - label: "Collective tenure" - description: Total number of years of team experience - model: "ref(people)" - calculation_method: sum - expression: tenure - timestamp: created_at - time_grains: [day] - filters: - - field: loves_dbt - operator: 'is' - value: 'true' - -""" - -invalid_metrics_missing_model_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - calculation_method: count - expression: "*" - timestamp: created_at - time_grains: [day, week, month] - dimensions: - - favorite_color - - loves_dbt - meta: - my_meta: 'testing' - - - name: collective_tenure - label: "Collective tenure" - description: Total number of years of team experience - calculation_method: sum - expression: tenure - timestamp: created_at - time_grains: [day] - filters: - - field: loves_dbt - operator: 'is' - value: 'true' - -""" - -invalid_metrics_missing_expression_yml = """ -version: 2 -metrics: - - name: number_of_people - label: "Number of people" - model: "ref(people)" - description: Total count of people - calculation_method: count - timestamp: created_at - time_grains: [day, week, month] - dimensions: - - favorite_color - - loves_dbt - meta: - my_meta: 'testing' -""" - -names_with_spaces_metrics_yml = """ -version: 2 - -metrics: - - - name: number of people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - -""" - -names_with_special_chars_metrics_yml = """ -version: 2 - -metrics: - - - name: number_of_people! - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - -""" - - -names_with_leading_numeric_metrics_yml = """ -version: 2 - -metrics: - - - name: 1_number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - -""" - -long_name_metrics_yml = """ -version: 2 - -metrics: - - - name: this_name_is_going_to_contain_more_than_250_characters_but_be_otherwise_acceptable_and_then_will_throw_an_error_which_I_expect_to_happen_and_repeat_this_name_is_going_to_contain_more_than_250_characters_but_be_otherwise_acceptable_and_then_will_throw_an_error_which_I_expect_to_happen - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - -""" - -downstream_model_sql = """ --- this model will depend on these three metrics -{% set some_metrics = [ - metric('count_orders'), - metric('sum_order_revenue'), - metric('average_order_value') -] %} - -/* -{% if not execute %} - - -- the only properties available to us at 'parse' time are: - -- 'metric_name' - -- 'package_name' (None if same package) - - {% set metric_names = [] %} - {% for m in some_metrics %} - {% do metric_names.append(m.metric_name) %} - {% endfor %} - - -- this config does nothing, but it lets us check these values below - {{ config(metric_names = metric_names) }} - -{% else %} - - -- these are the properties available to us at 'execution' time - - {% for m in some_metrics %} - name: {{ m.name }} - label: {{ m.label }} - type: {{ m.type }} - type_params: {{ m.type_params }} - filter: {{ m.filter }} - {% endfor %} - -{% endif %} - -select 1 as id -""" - -invalid_derived_metric_contains_model_yml = """ -version: 2 -metrics: - - name: count_orders - label: Count orders - model: ref('mock_purchase_data') - - calculation_method: count - expression: "*" - timestamp: purchased_at - time_grains: [day, week, month, quarter, year] - - dimensions: - - payment_type - - - name: sum_order_revenue - label: Total order revenue - model: ref('mock_purchase_data') - - calculation_method: sum - expression: "payment_total" - timestamp: purchased_at - time_grains: [day, week, month, quarter, year] - - dimensions: - - payment_type - - - name: average_order_value - label: Average Order Value - - calculation_method: derived - expression: "{{metric('sum_order_revenue')}} / {{metric('count_orders')}} " - model: ref('mock_purchase_data') - timestamp: purchased_at - time_grains: [day, week, month, quarter, year] - - dimensions: - - payment_type -""" - -purchasing_model_sql = """ -select purchased_at, payment_type, payment_total from {{ ref('mock_purchase_data') }} -""" - -semantic_model_purchasing_yml = """ -version: 2 - -semantic_models: - - name: semantic_purchasing - model: ref('purchasing') - measures: - - name: num_orders - agg: COUNT - expr: purchased_at - - name: order_revenue - agg: SUM - expr: payment_total - dimensions: - - name: purchased_at - type: TIME - entities: - - name: purchase - type: primary - expr: '1' - defaults: - agg_time_dimension: purchased_at - -""" - -derived_metric_yml = """ -version: 2 -metrics: - - name: count_orders - label: Count orders - type: simple - type_params: - measure: num_orders - - - name: sum_order_revenue - label: Total order revenue - type: simple - type_params: - measure: order_revenue - - - name: average_order_value - label: Average Order Value - type: ratio - type_params: - numerator: - name: sum_order_revenue - denominator: - name: count_orders -""" - -disabled_metric_level_schema_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - config: - enabled: False - meta: - my_meta: 'testing' - - - name: collective_tenure - label: "Collective tenure" - description: Total number of years of team experience - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - -""" - -enabled_metric_level_schema_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - config: - enabled: True - meta: - my_meta: 'testing' - - - name: collective_tenure - label: "Collective tenure" - description: Total number of years of team experience - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - -""" - -models_people_metrics_sql = """ --- this model will depend on these two metrics -{% set some_metrics = [ - metric('number_of_people'), - metric('collective_tenure') -] %} - -/* -{% if not execute %} - - -- the only properties available to us at 'parse' time are: - -- 'metric_name' - -- 'package_name' (None if same package) - - {% set metric_names = [] %} - {% for m in some_metrics %} - {% do metric_names.append(m.metric_name) %} - {% endfor %} - - -- this config does nothing, but it lets us check these values below - {{ config(metric_names = metric_names) }} - -{% else %} - - -- these are the properties available to us at 'execution' time - - {% for m in some_metrics %} - name: {{ m.name }} - label: {{ m.label }} - type: {{ m.type }} - type_params: {{ m.type_params }} - filter: {{ m.filter }} - window: {{ m.window }} - {% endfor %} - -{% endif %} - -select 1 as id -""" - -metrics_1_yml = """ -version: 2 - -metrics: - - name: some_metric - label: Some Metric - type: simple - type_params: - measure: some_measure -""" - -metrics_2_yml = """ -version: 2 - -metrics: - - name: some_metric - label: Some Metric - type: simple - type_params: - measure: some_measure -""" - -model_a_sql = """ -select 1 as fun -""" - -model_b_sql = """ --- {{ metric('some_metric') }} - -{% if execute %} - {% set model_ref_node = graph.nodes.values() | selectattr('name', 'equalto', 'model_a') | first %} - {% set relation = api.Relation.create( - database = model_ref_node.database, - schema = model_ref_node.schema, - identifier = model_ref_node.alias - ) - %} -{% else %} - {% set relation = "" %} -{% endif %} - --- this one is a real ref -select * from {{ ref('model_a') }} -union all --- this one is synthesized via 'graph' var -select * from {{ relation }} -""" - -invalid_config_metric_yml = """ -version: 2 - -metrics: - - name: number_of_people - label: "Number of people" - config: - enabled: True and False - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' -""" - -invalid_metric_without_timestamp_with_time_grains_yml = """ -version: 2 - -metrics: - - name: number_of_people - label: "Number of people" - description: Total count of people - model: "ref('people')" - time_grains: [day, week, month] - calculation_method: count - expression: "*" - dimensions: - - favorite_color - - loves_dbt - meta: - my_meta: 'testing' -""" - -invalid_metric_without_timestamp_with_window_yml = """ -version: 2 - -metrics: - - name: number_of_people - label: "Number of people" - description: Total count of people - model: "ref('people')" - window: - count: 14 - period: day - calculation_method: count - expression: "*" - dimensions: - - favorite_color - - loves_dbt - meta: - my_meta: 'testing' -""" - -conversion_semantic_model_purchasing_yml = """ -version: 2 - -semantic_models: - - name: semantic_purchasing - model: ref('purchasing') - measures: - - name: num_orders - agg: COUNT - expr: purchased_at - - name: num_visits - agg: SUM - expr: 1 - dimensions: - - name: purchased_at - type: TIME - entities: - - name: purchase - type: primary - expr: '1' - defaults: - agg_time_dimension: purchased_at - -""" - -conversion_metric_yml = """ -version: 2 -metrics: - - name: converted_orders_over_visits - label: Number of orders converted from visits - type: conversion - type_params: - conversion_type_params: - base_measure: num_visits - conversion_measure: num_orders - entity: purchase -""" diff --git a/tests/functional/metrics/test_metric_configs.py b/tests/functional/metrics/test_metric_configs.py deleted file mode 100644 index f4b75015d..000000000 --- a/tests/functional/metrics/test_metric_configs.py +++ /dev/null @@ -1,206 +0,0 @@ -from dbt.contracts.graph.model_config import MetricConfig -from dbt.exceptions import ParsingError -from dbt.tests.util import get_manifest, run_dbt, update_config_file -from dbt_common.dataclass_schema import ValidationError -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.metrics.fixtures import ( - disabled_metric_level_schema_yml, - enabled_metric_level_schema_yml, - invalid_config_metric_yml, - metricflow_time_spine_sql, - models_people_metrics_sql, - models_people_metrics_yml, - models_people_sql, - semantic_model_people_yml, -) - - -class MetricConfigTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self): - pytest.expected_config = MetricConfig( - enabled=True, - ) - - -# Test enabled config in dbt_project.yml -class TestMetricEnabledConfigProjectLevel(MetricConfigTests): - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "schema.yml": models_people_metrics_yml, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "metrics": { - "average_tenure_minus_people": { - "enabled": True, - }, - } - } - - def test_enabled_metric_config_dbt_project(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "metric.test.average_tenure_minus_people" in manifest.metrics - - new_enabled_config = { - "metrics": { - "test": { - "average_tenure_minus_people": { - "enabled": False, - }, - } - } - } - update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "metric.test.average_tenure_minus_people" not in manifest.metrics - assert "metric.test.collective_tenure" in manifest.metrics - - -# Test enabled config at metrics level in yml file -class TestConfigYamlMetricLevel(MetricConfigTests): - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "schema.yml": disabled_metric_level_schema_yml, - } - - def test_metric_config_yaml_metric_level(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "metric.test.number_of_people" not in manifest.metrics - assert "metric.test.collective_tenure" in manifest.metrics - - -# Test inheritence - set configs at project and metric level - expect metric level to win -class TestMetricConfigsInheritence(MetricConfigTests): - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "schema.yml": enabled_metric_level_schema_yml, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return {"metrics": {"enabled": False}} - - def test_metrics_all_configs(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - # This should be overridden - assert "metric.test.number_of_people" in manifest.metrics - # This should stay disabled - assert "metric.test.collective_tenure" not in manifest.metrics - - config_test_table = manifest.metrics.get("metric.test.number_of_people").config - - assert isinstance(config_test_table, MetricConfig) - assert config_test_table == pytest.expected_config - - -# Test CompilationError if a model references a disabled metric -class TestDisabledMetricRef(MetricConfigTests): - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "people_metrics.sql": models_people_metrics_sql, - "schema.yml": models_people_metrics_yml, - } - - def test_disabled_metric_ref_model(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "metric.test.number_of_people" in manifest.metrics - assert "metric.test.collective_tenure" in manifest.metrics - assert "model.test.people_metrics" in manifest.nodes - assert "metric.test.average_tenure" in manifest.metrics - assert "metric.test.average_tenure_minus_people" in manifest.metrics - - new_enabled_config = { - "metrics": { - "test": { - "number_of_people": { - "enabled": False, - }, - "average_tenure_minus_people": { - "enabled": False, - }, - "average_tenure": { - "enabled": False, - }, - } - } - } - - update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") - with pytest.raises(CompilationError): - run_dbt(["parse"]) - - -# Test invalid metric configs -class TestInvalidMetric(MetricConfigTests): - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "schema.yml": invalid_config_metric_yml, - } - - def test_invalid_config_metric(self, project): - with pytest.raises(ValidationError) as excinfo: - run_dbt(["parse"]) - expected_msg = "'True and False' is not of type 'boolean'" - assert expected_msg in str(excinfo.value) - - -class TestDisabledMetric(MetricConfigTests): - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "schema.yml": models_people_metrics_yml, - } - - def test_disabling_upstream_metric_errors(self, project): - run_dbt(["parse"]) # shouldn't error out yet - - new_enabled_config = { - "metrics": { - "test": { - "number_of_people": { - "enabled": False, - }, - } - } - } - - update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") - with pytest.raises(ParsingError) as excinfo: - run_dbt(["parse"]) - expected_msg = ( - "The metric `number_of_people` is disabled and thus cannot be referenced." - ) - assert expected_msg in str(excinfo.value) diff --git a/tests/functional/metrics/test_metric_deferral.py b/tests/functional/metrics/test_metric_deferral.py deleted file mode 100644 index 11affcd00..000000000 --- a/tests/functional/metrics/test_metric_deferral.py +++ /dev/null @@ -1,83 +0,0 @@ -import os -from pathlib import Path - -from dbt.tests.util import copy_file, run_dbt, write_file -import pytest - -from tests.functional.metrics.fixtures import ( - metrics_1_yml, - metrics_2_yml, - model_a_sql, - model_b_sql, -) - - -class TestMetricDeferral: - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - # Create "prod" schema - prod_schema_name = project.test_schema + "_prod" - project.create_test_schema(schema_name=prod_schema_name) - # Create "state" directory - path = Path(project.project_root) / "state" - Path.mkdir(path) - - @pytest.fixture(scope="class") - def dbt_profile_data(self, unique_schema): - return { - "test": { - "outputs": { - "default": { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), - "user": os.getenv("POSTGRES_TEST_USER", "root"), - "pass": os.getenv("POSTGRES_TEST_PASS", "password"), - "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), - "schema": unique_schema, - }, - "prod": { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), - "user": os.getenv("POSTGRES_TEST_USER", "root"), - "pass": os.getenv("POSTGRES_TEST_PASS", "password"), - "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), - "schema": unique_schema + "_prod", - }, - }, - "target": "default", - }, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": model_a_sql, - "model_b.sql": model_b_sql, - "metrics.yml": metrics_1_yml, - } - - @pytest.mark.skip("TODO") - def test_metric_deferral(self, project): - results = run_dbt(["run", "--target", "prod"]) - assert len(results) == 2 - - # copy manifest.json to "state" directory - target_path = os.path.join(project.project_root, "target") - copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) - - # Change metrics file - write_file(metrics_2_yml, project.project_root, "models", "metrics.yml") - - # Confirm that some_metric + model_b are both selected, and model_a is not selected - results = run_dbt(["ls", "-s", "state:modified+", "--state", "state/", "--target", "prod"]) - assert results == ["metric:test.some_metric", "test.model_b"] - - # Run in default schema - results = run_dbt( - ["run", "-s", "state:modified+", "--state", "state/", "--defer", "--target", "default"] - ) - assert len(results) == 1 diff --git a/tests/functional/metrics/test_metric_helper_functions.py b/tests/functional/metrics/test_metric_helper_functions.py deleted file mode 100644 index 7f12232ae..000000000 --- a/tests/functional/metrics/test_metric_helper_functions.py +++ /dev/null @@ -1,53 +0,0 @@ -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.metrics import ResolvedMetricReference -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.metrics.fixtures import ( - basic_metrics_yml, - metricflow_time_spine_sql, - models_people_sql, - semantic_model_people_yml, -) - - -class TestMetricHelperFunctions: - @pytest.fixture(scope="class") - def models(self): - return { - "metrics.yml": basic_metrics_yml, - "semantic_people.yml": semantic_model_people_yml, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "people.sql": models_people_sql, - } - - def test_derived_metric( - self, - project, - ): - # initial parse - manifest = run_dbt(["parse"]) - assert isinstance(manifest, Manifest) - - parsed_metric = manifest.metrics["metric.test.average_tenure_plus_one"] - testing_metric = ResolvedMetricReference(parsed_metric, manifest) - - full_metric_dependency = set(testing_metric.full_metric_dependency()) - expected_full_metric_dependency = set( - ["average_tenure_plus_one", "average_tenure", "collective_tenure", "number_of_people"] - ) - assert full_metric_dependency == expected_full_metric_dependency - - base_metric_dependency = set(testing_metric.base_metric_dependency()) - expected_base_metric_dependency = set(["collective_tenure", "number_of_people"]) - assert base_metric_dependency == expected_base_metric_dependency - - derived_metric_dependency = set(testing_metric.derived_metric_dependency()) - expected_derived_metric_dependency = set(["average_tenure_plus_one", "average_tenure"]) - assert derived_metric_dependency == expected_derived_metric_dependency - - derived_metric_dependency_depth = list(testing_metric.derived_metric_dependency_depth()) - expected_derived_metric_dependency_depth = list( - [{"average_tenure_plus_one": 1}, {"average_tenure": 2}] - ) - assert derived_metric_dependency_depth == expected_derived_metric_dependency_depth diff --git a/tests/functional/metrics/test_metrics.py b/tests/functional/metrics/test_metrics.py deleted file mode 100644 index 1275e47a7..000000000 --- a/tests/functional/metrics/test_metrics.py +++ /dev/null @@ -1,399 +0,0 @@ -from dbt.cli.main import dbtRunner -from dbt.contracts.graph.manifest import Manifest -from dbt.exceptions import ParsingError -from dbt.tests.util import get_manifest, run_dbt -import pytest - -from tests.functional.metrics.fixtures import ( - conversion_metric_yml, - conversion_semantic_model_purchasing_yml, - derived_metric_yml, - downstream_model_sql, - invalid_derived_metric_contains_model_yml, - invalid_metric_without_timestamp_with_time_grains_yml, - invalid_metric_without_timestamp_with_window_yml, - invalid_metrics_missing_expression_yml, - invalid_metrics_missing_model_yml, - invalid_models_people_metrics_yml, - long_name_metrics_yml, - metricflow_time_spine_sql, - mock_purchase_data_csv, - models_people_metrics_yml, - models_people_sql, - names_with_leading_numeric_metrics_yml, - names_with_spaces_metrics_yml, - names_with_special_chars_metrics_yml, - purchasing_model_sql, - semantic_model_people_yml, - semantic_model_purchasing_yml, -) - - -class TestSimpleMetrics: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": models_people_metrics_yml, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_model_people.yml": semantic_model_people_yml, - "people.sql": models_people_sql, - } - - def test_simple_metric( - self, - project, - ): - runner = dbtRunner() - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - manifest = get_manifest(project.project_root) - metric_ids = list(manifest.metrics.keys()) - expected_metric_ids = [ - "metric.test.number_of_people", - "metric.test.collective_tenure", - "metric.test.collective_window", - "metric.test.average_tenure", - "metric.test.average_tenure_minus_people", - ] - assert metric_ids == expected_metric_ids - - assert ( - len(manifest.metrics["metric.test.number_of_people"].type_params.input_measures) == 1 - ) - assert ( - len(manifest.metrics["metric.test.collective_tenure"].type_params.input_measures) == 1 - ) - assert ( - len(manifest.metrics["metric.test.collective_window"].type_params.input_measures) == 1 - ) - assert len(manifest.metrics["metric.test.average_tenure"].type_params.input_measures) == 2 - assert ( - len( - manifest.metrics[ - "metric.test.average_tenure_minus_people" - ].type_params.input_measures - ) - == 3 - ) - - -class TestInvalidRefMetrics: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": invalid_models_people_metrics_yml, - "people.sql": models_people_sql, - } - - # tests that we get a ParsingError with an invalid model ref, where - # the model name does not have quotes - def test_simple_metric( - self, - project, - ): - # initial run - with pytest.raises(ParsingError): - run_dbt(["run"]) - - -class TestInvalidMetricMissingModel: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": invalid_metrics_missing_model_yml, - "people.sql": models_people_sql, - } - - # tests that we get a ParsingError with an invalid model ref, where - # the model name does not have quotes - def test_simple_metric( - self, - project, - ): - # initial run - with pytest.raises(ParsingError): - run_dbt(["run"]) - - -class TestInvalidMetricMissingExpression: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": invalid_metrics_missing_expression_yml, - "people.sql": models_people_sql, - } - - # tests that we get a ParsingError with a missing expression - def test_simple_metric( - self, - project, - ): - # initial run - with pytest.raises(ParsingError): - run_dbt(["run"]) - - -class TestNamesWithSpaces: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": names_with_spaces_metrics_yml, - "people.sql": models_people_sql, - } - - def test_names_with_spaces(self, project): - with pytest.raises(ParsingError) as exc: - run_dbt(["run"]) - assert "cannot contain spaces" in str(exc.value) - - -class TestNamesWithSpecialChar: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": names_with_special_chars_metrics_yml, - "people.sql": models_people_sql, - } - - def test_names_with_special_char(self, project): - with pytest.raises(ParsingError) as exc: - run_dbt(["run"]) - assert "must contain only letters, numbers and underscores" in str(exc.value) - - -class TestNamesWithLeandingNumber: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": names_with_leading_numeric_metrics_yml, - "people.sql": models_people_sql, - } - - def test_names_with_leading_number(self, project): - with pytest.raises(ParsingError) as exc: - run_dbt(["run"]) - assert "must begin with a letter" in str(exc.value) - - -class TestLongName: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": long_name_metrics_yml, - "people.sql": models_people_sql, - } - - def test_long_name(self, project): - with pytest.raises(ParsingError) as exc: - run_dbt(["run"]) - assert "cannot contain more than 250 characters" in str(exc.value) - - -class TestInvalidDerivedMetrics: - @pytest.fixture(scope="class") - def models(self): - return { - "derived_metric.yml": invalid_derived_metric_contains_model_yml, - "downstream_model.sql": downstream_model_sql, - } - - def test_invalid_derived_metrics(self, project): - with pytest.raises(ParsingError): - run_dbt(["run"]) - - -class TestMetricDependsOn: - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_models.yml": semantic_model_people_yml, - "people_metrics.yml": models_people_metrics_yml, - } - - def test_metric_depends_on(self, project): - manifest = run_dbt(["parse"]) - assert isinstance(manifest, Manifest) - - expected_depends_on_for_number_of_people = ["semantic_model.test.semantic_people"] - expected_depends_on_for_average_tenure = [ - "metric.test.collective_tenure", - "metric.test.number_of_people", - ] - - number_of_people_metric = manifest.metrics["metric.test.number_of_people"] - assert number_of_people_metric.depends_on.nodes == expected_depends_on_for_number_of_people - - average_tenure_metric = manifest.metrics["metric.test.average_tenure"] - assert average_tenure_metric.depends_on.nodes == expected_depends_on_for_average_tenure - - -class TestDerivedMetric: - @pytest.fixture(scope="class") - def models(self): - return { - "downstream_model.sql": downstream_model_sql, - "purchasing.sql": purchasing_model_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_models.yml": semantic_model_purchasing_yml, - "derived_metric.yml": derived_metric_yml, - } - - # not strictly necessary to use "real" mock data for this test - # we just want to make sure that the 'metric' calls match our expectations - # but this sort of thing is possible, to have actual data flow through and validate results - @pytest.fixture(scope="class") - def seeds(self): - return { - "mock_purchase_data.csv": mock_purchase_data_csv, - } - - def test_derived_metric( - self, - project, - ): - # initial parse - results = run_dbt(["parse"]) - - # make sure all the metrics are in the manifest - manifest = get_manifest(project.project_root) - metric_ids = list(manifest.metrics.keys()) - expected_metric_ids = [ - "metric.test.count_orders", - "metric.test.sum_order_revenue", - "metric.test.average_order_value", - ] - assert metric_ids == expected_metric_ids - - # make sure the downstream_model depends on these metrics - metric_names = ["average_order_value", "count_orders", "sum_order_revenue"] - downstream_model = manifest.nodes["model.test.downstream_model"] - assert sorted(downstream_model.metrics) == [[metric_name] for metric_name in metric_names] - assert sorted(downstream_model.depends_on.nodes) == [ - "metric.test.average_order_value", - "metric.test.count_orders", - "metric.test.sum_order_revenue", - ] - assert sorted(downstream_model.config["metric_names"]) == metric_names - - # make sure the 'expression' metric depends on the two upstream metrics - derived_metric = manifest.metrics["metric.test.average_order_value"] - assert sorted(derived_metric.depends_on.nodes) == [ - "metric.test.count_orders", - "metric.test.sum_order_revenue", - ] - - # actually compile - results = run_dbt(["compile", "--select", "downstream_model"]) - compiled_code = results[0].node.compiled_code - - # make sure all these metrics properties show up in compiled SQL - for metric_name in manifest.metrics: - parsed_metric_node = manifest.metrics[metric_name] - for property in [ - "name", - "label", - "type", - "type_params", - "filter", - ]: - expected_value = getattr(parsed_metric_node, property) - assert f"{property}: {expected_value}" in compiled_code - - -class TestInvalidTimestampTimeGrainsMetrics: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": invalid_metric_without_timestamp_with_time_grains_yml, - "people.sql": models_people_sql, - } - - # Tests that we get a ParsingError with an invalid metric definition. - # This metric definition is missing timestamp but HAS a time_grains property - def test_simple_metric( - self, - project, - ): - # initial run - with pytest.raises(ParsingError): - run_dbt(["run"]) - - -class TestInvalidTimestampWindowMetrics: - @pytest.fixture(scope="class") - def models(self): - return { - "people_metrics.yml": invalid_metric_without_timestamp_with_window_yml, - "people.sql": models_people_sql, - } - - # Tests that we get a ParsingError with an invalid metric definition. - # This metric definition is missing timestamp but HAS a window property - def test_simple_metric( - self, - project, - ): - # initial run - with pytest.raises(ParsingError): - run_dbt(["run"]) - - -class TestConversionMetric: - @pytest.fixture(scope="class") - def models(self): - return { - "purchasing.sql": purchasing_model_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "semantic_models.yml": conversion_semantic_model_purchasing_yml, - "conversion_metric.yml": conversion_metric_yml, - } - - @pytest.fixture(scope="class") - def seeds(self): - return { - "mock_purchase_data.csv": mock_purchase_data_csv, - } - - def test_conversion_metric( - self, - project, - ): - # initial parse - runner = dbtRunner() - result = runner.invoke(["parse"]) - assert result.success - assert isinstance(result.result, Manifest) - - # make sure the metric is in the manifest - manifest = get_manifest(project.project_root) - metric_ids = list(manifest.metrics.keys()) - expected_metric_ids = [ - "metric.test.converted_orders_over_visits", - ] - assert metric_ids == expected_metric_ids - assert manifest.metrics[ - "metric.test.converted_orders_over_visits" - ].type_params.conversion_type_params - assert ( - len( - manifest.metrics[ - "metric.test.converted_orders_over_visits" - ].type_params.input_measures - ) - == 2 - ) - assert ( - manifest.metrics[ - "metric.test.converted_orders_over_visits" - ].type_params.conversion_type_params.window - is None - ) - assert ( - manifest.metrics[ - "metric.test.converted_orders_over_visits" - ].type_params.conversion_type_params.entity - == "purchase" - ) From 618c85d7108039abd868b328973fdd45784b4c79 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Wed, 20 Mar 2024 17:29:18 -0700 Subject: [PATCH 044/114] Change input references to make documentation recommendations. (#37) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .github/workflows/integration-tests.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 874001c48..691029e31 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -71,8 +71,8 @@ jobs: shell: bash run: | ./.github/scripts/update_dev_packages.sh \ - ${{ github.event.inputs.dbt_adapters_branch }} \ - ${{ github.event.inputs.core_branch }} + ${{ inputs.dbt_adapters_branch }} \ + ${{ inputs.core_branch }} - name: Setup postgres shell: bash From 23880f420ddf73f96d51c47dacb4d063b0151a0f Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Thu, 21 Mar 2024 07:44:08 -0700 Subject: [PATCH 045/114] Add test to restore transactions behavior. (#24) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Mike Alfare <mike.alfare@dbtlabs.com> --- .../Under the Hood-20240226-225642.yaml | 6 ++++++ dbt/adapters/postgres/relation.py | 10 +++++----- tests/unit/test_renamed_relations.py | 16 ++++++++++++++++ 3 files changed, 27 insertions(+), 5 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20240226-225642.yaml create mode 100644 tests/unit/test_renamed_relations.py diff --git a/.changes/unreleased/Under the Hood-20240226-225642.yaml b/.changes/unreleased/Under the Hood-20240226-225642.yaml new file mode 100644 index 000000000..dd5d0645e --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240226-225642.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add unit test for transaction semantics. +time: 2024-02-26T22:56:42.202429-08:00 +custom: + Author: versusfacit + Issue: "23" diff --git a/dbt/adapters/postgres/relation.py b/dbt/adapters/postgres/relation.py index 3f659fb4c..677b12ac6 100644 --- a/dbt/adapters/postgres/relation.py +++ b/dbt/adapters/postgres/relation.py @@ -1,4 +1,4 @@ -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import FrozenSet, Optional, Set from dbt.adapters.base.relation import BaseRelation @@ -20,19 +20,19 @@ @dataclass(frozen=True, eq=False, repr=False) class PostgresRelation(BaseRelation): - renameable_relations = frozenset( + renameable_relations: FrozenSet[RelationType] = field(default_factory=lambda: frozenset( { RelationType.View, RelationType.Table, RelationType.MaterializedView, } - ) - replaceable_relations = frozenset( + )) + replaceable_relations: FrozenSet[RelationType] = field(default_factory=lambda: frozenset( { RelationType.View, RelationType.Table, } - ) + )) def __post_init__(self): # Check for length of Postgres table/view names. diff --git a/tests/unit/test_renamed_relations.py b/tests/unit/test_renamed_relations.py new file mode 100644 index 000000000..49900d8ef --- /dev/null +++ b/tests/unit/test_renamed_relations.py @@ -0,0 +1,16 @@ +from dbt.adapters.postgres.relation import PostgresRelation +from dbt.adapters.contracts.relation import RelationType + + +def test_renameable_relation(): + relation = PostgresRelation.create( + database="my_db", + schema="my_schema", + identifier="my_table", + type=RelationType.Table, + ) + assert relation.renameable_relations == frozenset({ + RelationType.View, + RelationType.Table, + RelationType.MaterializedView, + }) From ee3da67db6f69baca576f6b758550257f8c1d575 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 26 Mar 2024 16:06:21 -0700 Subject: [PATCH 046/114] Add release internal workflow (#36) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .github/workflows/release-internal.yml | 25 ++++++++++--------------- 1 file changed, 10 insertions(+), 15 deletions(-) diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml index d560787ee..01f1a9e3d 100644 --- a/.github/workflows/release-internal.yml +++ b/.github/workflows/release-internal.yml @@ -1,19 +1,16 @@ # What? # -# Send a sha as a fully fledged relase to an internal archive for further processing. +# Tag and release an arbitrary ref. Uploads to an internal archive for further processing. # # How? # -# Checkout the sha -# Test it -# Build it -# Upload it +# After checking out and testing the provided ref, the image is built and uploaded. # # When? # -# Manual trigger +# Manual trigger. -name: Release internal patch +name: "Release internal patch" on: workflow_dispatch: @@ -22,10 +19,11 @@ on: description: "The release version number (i.e. 1.0.0b1)" type: string required: true - sha: - description: "The sha to use (leave empty to use latest on main)" + ref: + description: "The ref (sha or branch name) to use" type: string - required: false + default: "main" + required: true package_test_command: description: "Package test command" type: string @@ -36,19 +34,16 @@ defaults: run: shell: "bash" -env: - PYTHON_TARGET_VERSION: 3.11 - jobs: invoke-reusable-workflow: name: "Build and Release Internally" - uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@mp/finish_internal_workflow" + uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@main" with: version_number: "${{ inputs.version_number }}" package_test_command: "${{ inputs.package_test_command }}" dbms_name: "postgres" - sha: "${{ inputs.sha }}" + ref: "${{ inputs.ref }}" secrets: "inherit" From b35ce40e6d594a04c2dec7401c18378798e77782 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 28 Mar 2024 13:43:24 -0400 Subject: [PATCH 047/114] Pin `black>=24.3` (#40) --- .changes/unreleased/Security-20240327-193942.yaml | 6 ++++++ pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Security-20240327-193942.yaml diff --git a/.changes/unreleased/Security-20240327-193942.yaml b/.changes/unreleased/Security-20240327-193942.yaml new file mode 100644 index 000000000..66dee543d --- /dev/null +++ b/.changes/unreleased/Security-20240327-193942.yaml @@ -0,0 +1,6 @@ +kind: Security +body: Pin `black>=24.3` in `pyproject.toml` +time: 2024-03-27T19:39:42.633016-04:00 +custom: + Author: mikealfare + Issue: "40" diff --git a/pyproject.toml b/pyproject.toml index acef4e330..10fd7f7f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ dependencies = [ [tool.hatch.envs.lint] detached = true dependencies = [ - "black", + "black>=24.3", "flake8", "Flake8-pyproject", ] From 1c95a75a4cc5b03440a20bbf45c9c82e96a96e16 Mon Sep 17 00:00:00 2001 From: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Mon, 1 Apr 2024 20:07:27 -0700 Subject: [PATCH 048/114] revert default psycopg2 back to psycopg2-binary (#41) --- .../Dependencies-20240328-133507.yaml | 6 +++ hatch_build.py | 54 +++++++++++++++++++ pyproject.toml | 17 +++--- 3 files changed, 68 insertions(+), 9 deletions(-) create mode 100644 .changes/unreleased/Dependencies-20240328-133507.yaml create mode 100644 hatch_build.py diff --git a/.changes/unreleased/Dependencies-20240328-133507.yaml b/.changes/unreleased/Dependencies-20240328-133507.yaml new file mode 100644 index 000000000..c7dbd3198 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240328-133507.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: add "no-binary" install option +time: 2024-03-28T13:35:07.300121-07:00 +custom: + Author: colin-rogers-dbt + Issue: "6" diff --git a/hatch_build.py b/hatch_build.py new file mode 100644 index 000000000..02693cf0b --- /dev/null +++ b/hatch_build.py @@ -0,0 +1,54 @@ +import os +from typing import Any, Dict + +from hatchling.builders.config import BuilderConfig +from hatchling.builders.hooks.plugin.interface import BuildHookInterface +from hatchling.plugin import hookimpl + +BASE_DEPS = [ + # psycopg2 dependency installed in custom hatch_build.py + "dbt-adapters>=0.1.0a1,<2.0", + # installed via dbt-adapters but used directly + "dbt-common>=0.1.0a1,<2.0", + "agate>=1.0,<2.0", +] + +PSYCOPG2_MESSAGE = """ +No package name override was set. +Using 'psycopg2-binary' package to satisfy 'psycopg2' + +If you experience segmentation faults, silent crashes, or installation errors, +consider retrying with the 'DBT_PSYCOPG2_NAME' environment variable set to +'psycopg2'. It may require a compiler toolchain and development libraries! +""".strip() + + +def _dbt_psycopg2_name(): + # if the user chose something, use that + package_name = os.getenv("DBT_PSYCOPG2_NAME", "") + if package_name: + return package_name + + # default to psycopg2-binary for all OSes/versions + print(PSYCOPG2_MESSAGE) + return "psycopg2-binary" + + +class CustomBuildHook(BuildHookInterface[BuilderConfig]): + """ + Custom build hook to install psycopg2 instead of psycopg2-binary based on the presence of `DBT_PSYCOPG2_NAME` env + var. This is necessary as psycopg2-binary is better for local development, but psycopg2 is better for production. + """ + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + def initialize(self, version: str, build_data: Dict) -> None: + build_data["dependencies"] = BASE_DEPS + psycopg2_pkg_name = _dbt_psycopg2_name() + build_data["dependencies"].append(f"{psycopg2_pkg_name}>=2.9,<3.0") + + +@hookimpl +def hatch_register_build_hook(): + return CustomBuildHook diff --git a/pyproject.toml b/pyproject.toml index 10fd7f7f4..e9d880a2b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [project] -dynamic = ["version"] +dynamic = ["version", "dependencies"] name = "dbt-postgres" description = "The set of adapter protocols and base functionality that supports integration with dbt-core" readme = "README.md" @@ -22,13 +22,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ] -dependencies = [ - "dbt-adapters>=0.1.0a1,<2.0", - "psycopg2>=2.9,<3.0", - # installed via dbt-adapters but used directly - "dbt-common>=0.1.0a1,<2.0", - "agate>=1.0,<2.0", -] + [project.urls] Homepage = "https://github.com/dbt-labs/dbt-postgres" Documentation = "https://docs.getdbt.com" @@ -49,6 +43,9 @@ packages = ["dbt"] [tool.hatch.version] path = "dbt/adapters/postgres/__version__.py" +[tool.hatch.build.hooks.custom] +path = "./hatch_build.py" + [tool.hatch.envs.default] dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", @@ -119,6 +116,8 @@ dependencies = [ "twine", "check-wheel-contents", ] + + [tool.hatch.envs.build.scripts] check-all = [ "- check-wheel", @@ -166,4 +165,4 @@ env_files = ["test.env"] testpaths = [ "tests/functional", "tests/unit", -] +] \ No newline at end of file From 5deb88864861acc5a367d391cfcbc1c207a0d41a Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 3 Apr 2024 15:53:34 -0400 Subject: [PATCH 049/114] Add `dbt-core~=1.8.0a1` as convenience dep (#44) --- .changes/unreleased/Dependencies-20240403-135902.yaml | 6 ++++++ hatch_build.py | 2 ++ tests/functional/shared_tests/test_query_comment.py | 8 +++++++- 3 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Dependencies-20240403-135902.yaml diff --git a/.changes/unreleased/Dependencies-20240403-135902.yaml b/.changes/unreleased/Dependencies-20240403-135902.yaml new file mode 100644 index 000000000..126b2178b --- /dev/null +++ b/.changes/unreleased/Dependencies-20240403-135902.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: Add `dbt-core` as a dependency to preserve backwards compatibility for installation +time: 2024-04-03T13:59:02.539298-04:00 +custom: + Author: mikealfare + Issue: "44" diff --git a/hatch_build.py b/hatch_build.py index 02693cf0b..a44d06c74 100644 --- a/hatch_build.py +++ b/hatch_build.py @@ -8,6 +8,8 @@ BASE_DEPS = [ # psycopg2 dependency installed in custom hatch_build.py "dbt-adapters>=0.1.0a1,<2.0", + # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency + "dbt-core>=1.8.0a1", # installed via dbt-adapters but used directly "dbt-common>=0.1.0a1,<2.0", "agate>=1.0,<2.0", diff --git a/tests/functional/shared_tests/test_query_comment.py b/tests/functional/shared_tests/test_query_comment.py index 30399b249..ea2ddeaf6 100644 --- a/tests/functional/shared_tests/test_query_comment.py +++ b/tests/functional/shared_tests/test_query_comment.py @@ -6,6 +6,7 @@ BaseNullQueryComments, BaseEmptyQueryComments, ) +import pytest class TestQueryComments(BaseQueryComments): @@ -17,7 +18,12 @@ class TestMacroQueryComments(BaseMacroQueryComments): class TestMacroArgsQueryComments(BaseMacroArgsQueryComments): - pass + @pytest.mark.skip( + "This test is incorrectly comparing the version of `dbt-core`" + "to the version of `dbt-postgres`, which is not always the same." + ) + def test_matches_comment(self, project, get_package_version): + pass class TestMacroInvalidQueryComments(BaseMacroInvalidQueryComments): From 0b70805f8937a0e74d7076ec51342caa18723c08 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 3 Apr 2024 16:13:36 -0400 Subject: [PATCH 050/114] Add version bump workflow (#46) --- .github/workflows/version-bump.yml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 .github/workflows/version-bump.yml diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml new file mode 100644 index 000000000..bde34d683 --- /dev/null +++ b/.github/workflows/version-bump.yml @@ -0,0 +1,28 @@ +# **what?** +# This workflow will take the new version number to bump to. With that +# it will run versionbump to update the version number everywhere in the +# code base and then run changie to create the corresponding changelog. +# A PR will be created with the changes that can be reviewed before committing. + +# **why?** +# This is to aid in releasing dbt and making sure we have updated +# the version in all places and generated the changelog. + +# **when?** +# This is triggered manually + +name: Version Bump + +on: + workflow_dispatch: + inputs: + version_number: + description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)' + required: true + +jobs: + version_bump_and_changie: + uses: dbt-labs/actions/.github/workflows/version-bump.yml@main + with: + version_number: ${{ inputs.version_number }} + secrets: inherit # ok since what we are calling is internally maintained From 336d5921fa9f5a62f97306abf1c66873c519bfc9 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 3 Apr 2024 16:59:35 -0400 Subject: [PATCH 051/114] Manual version bump (#47) --- .changes/1.8.0-b2.md | 14 ++++++++++++++ .../Dependencies-20240328-133507.yaml | 0 .../Dependencies-20240403-135902.yaml | 0 .../Security-20240327-193942.yaml | 0 .../Under the Hood-20240226-225642.yaml | 0 CHANGELOG.md | 14 +++++++++++++- dbt/adapters/postgres/__version__.py | 2 +- 7 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 .changes/1.8.0-b2.md rename .changes/{unreleased => 1.8.0}/Dependencies-20240328-133507.yaml (100%) rename .changes/{unreleased => 1.8.0}/Dependencies-20240403-135902.yaml (100%) rename .changes/{unreleased => 1.8.0}/Security-20240327-193942.yaml (100%) rename .changes/{unreleased => 1.8.0}/Under the Hood-20240226-225642.yaml (100%) diff --git a/.changes/1.8.0-b2.md b/.changes/1.8.0-b2.md new file mode 100644 index 000000000..193206ccb --- /dev/null +++ b/.changes/1.8.0-b2.md @@ -0,0 +1,14 @@ +## dbt-postgres 1.8.0-b2 - April 03, 2024 + +### Under the Hood + +* Add unit test for transaction semantics. + +### Dependencies + +* add "no-binary" install option +* Add `dbt-core` as a dependency to preserve backwards compatibility for installation + +### Security + +* Pin `black>=24.3` in `pyproject.toml` diff --git a/.changes/unreleased/Dependencies-20240328-133507.yaml b/.changes/1.8.0/Dependencies-20240328-133507.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240328-133507.yaml rename to .changes/1.8.0/Dependencies-20240328-133507.yaml diff --git a/.changes/unreleased/Dependencies-20240403-135902.yaml b/.changes/1.8.0/Dependencies-20240403-135902.yaml similarity index 100% rename from .changes/unreleased/Dependencies-20240403-135902.yaml rename to .changes/1.8.0/Dependencies-20240403-135902.yaml diff --git a/.changes/unreleased/Security-20240327-193942.yaml b/.changes/1.8.0/Security-20240327-193942.yaml similarity index 100% rename from .changes/unreleased/Security-20240327-193942.yaml rename to .changes/1.8.0/Security-20240327-193942.yaml diff --git a/.changes/unreleased/Under the Hood-20240226-225642.yaml b/.changes/1.8.0/Under the Hood-20240226-225642.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240226-225642.yaml rename to .changes/1.8.0/Under the Hood-20240226-225642.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 5ca9d33a1..29a312343 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,5 +5,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). +## dbt-postgres 1.8.0-b2 - April 03, 2024 -No releases yet, this file will be updated when generating your first release. +### Under the Hood + +* Add unit test for transaction semantics. + +### Dependencies + +* add "no-binary" install option +* Add `dbt-core` as a dependency to preserve backwards compatibility for installation + +### Security + +* Pin `black>=24.3` in `pyproject.toml` diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index 6496f3e22..7d16c28f0 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.8.0b1" +version = "1.8.0b2" From 461616fcdbb6dcff62a308b866d2e27dba820d21 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 12 Apr 2024 11:50:52 -0400 Subject: [PATCH 052/114] Update dependabot config to cover GHA (#56) --- .changes/unreleased/Under the Hood-20240410-180644.yaml | 6 ++++++ .github/dependabot.yml | 6 +++++- 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Under the Hood-20240410-180644.yaml diff --git a/.changes/unreleased/Under the Hood-20240410-180644.yaml b/.changes/unreleased/Under the Hood-20240410-180644.yaml new file mode 100644 index 000000000..2f7eeda61 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240410-180644.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Update dependabot configuration to cover GHA +time: 2024-04-10T18:06:44.884603-04:00 +custom: + Author: mikealfare + Issue: "56" diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 2a6f34492..4673f47cf 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,8 +1,12 @@ version: 2 updates: - # python dependencies - package-ecosystem: "pip" directory: "/" schedule: interval: "daily" rebase-strategy: "disabled" + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + rebase-strategy: "disabled" From 32c8547ae42169f8147c5fbec2712c22b7f9db33 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 12 Apr 2024 19:19:05 -0400 Subject: [PATCH 053/114] Install `psycopg2` based on platform (#60) --- .../unreleased/Fixes-20240412-153154.yaml | 6 + .github/workflows/integration-tests.yml | 30 +- hatch_build.py | 56 - pyproject.toml | 17 +- tests/functional/partial_parsing/fixtures.py | 1228 ----------------- .../partial_parsing/test_file_diff.py | 63 - .../partial_parsing/test_partial_parsing.py | 824 ----------- .../test_pp_disabled_config.py | 224 --- .../partial_parsing/test_pp_docs.py | 257 ---- .../partial_parsing/test_pp_groups.py | 155 --- .../partial_parsing/test_pp_metrics.py | 85 -- .../partial_parsing/test_pp_vars.py | 398 ------ .../partial_parsing/test_versioned_models.py | 128 -- 13 files changed, 47 insertions(+), 3424 deletions(-) create mode 100644 .changes/unreleased/Fixes-20240412-153154.yaml delete mode 100644 hatch_build.py delete mode 100644 tests/functional/partial_parsing/fixtures.py delete mode 100644 tests/functional/partial_parsing/test_file_diff.py delete mode 100644 tests/functional/partial_parsing/test_partial_parsing.py delete mode 100644 tests/functional/partial_parsing/test_pp_disabled_config.py delete mode 100644 tests/functional/partial_parsing/test_pp_docs.py delete mode 100644 tests/functional/partial_parsing/test_pp_groups.py delete mode 100644 tests/functional/partial_parsing/test_pp_metrics.py delete mode 100644 tests/functional/partial_parsing/test_pp_vars.py delete mode 100644 tests/functional/partial_parsing/test_versioned_models.py diff --git a/.changes/unreleased/Fixes-20240412-153154.yaml b/.changes/unreleased/Fixes-20240412-153154.yaml new file mode 100644 index 000000000..10bac271f --- /dev/null +++ b/.changes/unreleased/Fixes-20240412-153154.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Determine `psycopg2` based on `platform_system` (Linux or other) +time: 2024-04-12T15:31:54.861201-04:00 +custom: + Author: mikealfare + Issue: "60" diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 691029e31..6bb37ec51 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -39,6 +39,10 @@ concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} cancel-in-progress: true +defaults: + run: + shell: bash + jobs: integration: name: Integration Tests @@ -68,14 +72,12 @@ jobs: - name: Update Adapters and Core branches if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch'}} - shell: bash run: | ./.github/scripts/update_dev_packages.sh \ ${{ inputs.dbt_adapters_branch }} \ ${{ inputs.core_branch }} - name: Setup postgres - shell: bash run: psql -f ./scripts/setup_test_database.sql env: PGHOST: localhost @@ -106,3 +108,27 @@ jobs: source-file: "results.csv" file-name: "integration_results" python-version: ${{ matrix.python-version }} + + psycopg2-check: + name: "Test psycopg2 build version" + runs-on: ${{ matrix.scenario.platform }} + strategy: + fail-fast: false + matrix: + scenario: + - {platform: ubuntu-latest, psycopg2-name: psycopg2} + - {platform: macos-latest, psycopg2-name: psycopg2-binary} + steps: + - name: "Check out repository" + uses: actions/checkout@v4 + + - name: "Test psycopg2 name" + run: | + python -m pip install . + PSYCOPG2_PIP_ENTRY=$(pip list | grep "psycopg2 " || pip list | grep psycopg2-binary) + echo $PSYCOPG2_PIP_ENTRY + PSYCOPG2_NAME="${PSYCOPG2_PIP_ENTRY%% *}" + echo $PSYCOPG2_NAME + if [[ "${PSYCOPG2_NAME}" != "${{ matrix.scenario.psycopg2-name }}" ]]; then + exit 1 + fi diff --git a/hatch_build.py b/hatch_build.py deleted file mode 100644 index a44d06c74..000000000 --- a/hatch_build.py +++ /dev/null @@ -1,56 +0,0 @@ -import os -from typing import Any, Dict - -from hatchling.builders.config import BuilderConfig -from hatchling.builders.hooks.plugin.interface import BuildHookInterface -from hatchling.plugin import hookimpl - -BASE_DEPS = [ - # psycopg2 dependency installed in custom hatch_build.py - "dbt-adapters>=0.1.0a1,<2.0", - # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency - "dbt-core>=1.8.0a1", - # installed via dbt-adapters but used directly - "dbt-common>=0.1.0a1,<2.0", - "agate>=1.0,<2.0", -] - -PSYCOPG2_MESSAGE = """ -No package name override was set. -Using 'psycopg2-binary' package to satisfy 'psycopg2' - -If you experience segmentation faults, silent crashes, or installation errors, -consider retrying with the 'DBT_PSYCOPG2_NAME' environment variable set to -'psycopg2'. It may require a compiler toolchain and development libraries! -""".strip() - - -def _dbt_psycopg2_name(): - # if the user chose something, use that - package_name = os.getenv("DBT_PSYCOPG2_NAME", "") - if package_name: - return package_name - - # default to psycopg2-binary for all OSes/versions - print(PSYCOPG2_MESSAGE) - return "psycopg2-binary" - - -class CustomBuildHook(BuildHookInterface[BuilderConfig]): - """ - Custom build hook to install psycopg2 instead of psycopg2-binary based on the presence of `DBT_PSYCOPG2_NAME` env - var. This is necessary as psycopg2-binary is better for local development, but psycopg2 is better for production. - """ - - def __init__(self, *args: Any, **kwargs: Any) -> None: - super().__init__(*args, **kwargs) - - def initialize(self, version: str, build_data: Dict) -> None: - build_data["dependencies"] = BASE_DEPS - psycopg2_pkg_name = _dbt_psycopg2_name() - build_data["dependencies"].append(f"{psycopg2_pkg_name}>=2.9,<3.0") - - -@hookimpl -def hatch_register_build_hook(): - return CustomBuildHook diff --git a/pyproject.toml b/pyproject.toml index e9d880a2b..996ea46ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [project] -dynamic = ["version", "dependencies"] +dynamic = ["version"] name = "dbt-postgres" description = "The set of adapter protocols and base functionality that supports integration with dbt-core" readme = "README.md" @@ -22,6 +22,18 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ] +dependencies = [ + # install `psycopg2` on linux (assumed production) + 'psycopg2>=2.9,<3.0; platform_system == "Linux"', + # install `psycopg2-binary` on macos/windows (assumed development) + 'psycopg2-binary>=2.9,<3.0; platform_system != "Linux"', + "dbt-adapters>=0.1.0a1,<2.0", + # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency + "dbt-core>=1.8.0a1", + # installed via dbt-adapters but used directly + "dbt-common>=0.1.0a1,<2.0", + "agate>=1.0,<2.0", +] [project.urls] Homepage = "https://github.com/dbt-labs/dbt-postgres" @@ -43,9 +55,6 @@ packages = ["dbt"] [tool.hatch.version] path = "dbt/adapters/postgres/__version__.py" -[tool.hatch.build.hooks.custom] -path = "./hatch_build.py" - [tool.hatch.envs.default] dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", diff --git a/tests/functional/partial_parsing/fixtures.py b/tests/functional/partial_parsing/fixtures.py deleted file mode 100644 index f76d90ad2..000000000 --- a/tests/functional/partial_parsing/fixtures.py +++ /dev/null @@ -1,1228 +0,0 @@ -local_dependency__dbt_project_yml = """ - -name: 'local_dep' -version: '1.0' -config-version: 2 - -profile: 'default' - -model-paths: ["models"] -analysis-paths: ["analyses"] -test-paths: ["tests"] -seed-paths: ["seeds"] -macro-paths: ["macros"] - -require-dbt-version: '>=0.1.0' - -target-path: "target" # directory which will store compiled SQL files -clean-targets: # directories to be removed by `dbt clean` - - "target" - - "dbt_packages" - - -seeds: - quote_columns: False - -""" - -local_dependency__models__schema_yml = """ -sources: - - name: seed_source - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: "seed" - columns: - - name: id - data_tests: - - unique - -""" - -local_dependency__models__model_to_import_sql = """ -select * from {{ ref('seed') }} - -""" - -local_dependency__macros__dep_macro_sql = """ -{% macro some_overridden_macro() -%} -100 -{%- endmacro %} - -""" - -local_dependency__seeds__seed_csv = """id -1 -""" - -empty_schema_with_version_yml = """ - -""" - -schema_sources5_yml = """ - -sources: - - name: seed_sources - schema: "{{ target.schema }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - - unique - - name: first_name - - name: last_name - - name: email - -seeds: - - name: rad_customers - description: "Raw customer data" - columns: - - name: id - data_tests: - - unique - - not_null - - name: first_name - - name: last_name - - name: email - - -""" - -my_macro2_sql = """ -{% macro do_something(foo2, bar2) %} - - select - 'foo' as foo2, - 'var' as bar2 - -{% endmacro %} - -""" - -raw_customers_csv = """id,first_name,last_name,email -1,Michael,Perez,mperez0@chronoengine.com -2,Shawn,Mccoy,smccoy1@reddit.com -3,Kathleen,Payne,kpayne2@cargocollective.com -4,Jimmy,Cooper,jcooper3@cargocollective.com -5,Katherine,Rice,krice4@typepad.com -6,Sarah,Ryan,sryan5@gnu.org -7,Martin,Mcdonald,mmcdonald6@opera.com -8,Frank,Robinson,frobinson7@wunderground.com -9,Jennifer,Franklin,jfranklin8@mail.ru -10,Henry,Welch,hwelch9@list-manage.com -""" - -model_three_disabled2_sql = """ -- Disabled model -{{ config(materialized='table', enabled=False) }} - -with source_data as ( - - select 1 as id - union all - select null as id - -) - -select * -from source_data - -""" - -schema_sources4_yml = """ - -sources: - - name: seed_sources - schema: "{{ target.schema }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - - unique - - every_value_is_blue - - name: first_name - - name: last_name - - name: email - -seeds: - - name: raw_customers - description: "Raw customer data" - columns: - - name: id - data_tests: - - unique - - not_null - - name: first_name - - name: last_name - - name: email - - -""" - -env_var_schema_yml = """ - -models: - - name: model_one - config: - materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" - -""" - -my_test_sql = """ -select - * from {{ ref('customers') }} where first_name = '{{ macro_something() }}' - -""" - -empty_schema_yml = """ - -""" - -schema_models_c_yml = """ - -sources: - - name: seed_source - description: "This is a source override" - overrides: local_dep - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: "seed" - columns: - - name: id - data_tests: - - unique - - not_null - -""" - -env_var_sources_yml = """ -sources: - - name: seed_sources - schema: "{{ target.schema }}" - database: "{{ env_var('ENV_VAR_DATABASE') }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ env_var('ENV_VAR_SEVERITY') }}" - - unique - - name: first_name - - name: last_name - - name: email - - - -""" - -generic_test_edited_sql = """ -{% test is_odd(model, column_name) %} - -with validation as ( - - select - {{ column_name }} as odd_field2 - - from {{ model }} - -), - -validation_errors as ( - - select - odd_field2 - - from validation - -- if this is true, then odd_field is actually even! - where (odd_field2 % 2) = 0 - -) - -select * -from validation_errors - -{% endtest %} -""" - -schema_sources1_yml = """ -sources: - - name: seed_sources - schema: "{{ target.schema }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - - unique - - name: first_name - - name: last_name - - name: email - - - -""" - -schema_sources3_yml = """ - -sources: - - name: seed_sources - schema: "{{ target.schema }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - - unique - - name: first_name - - name: last_name - - name: email - -exposures: - - name: proxy_for_dashboard - description: "This is for the XXX dashboard" - type: "dashboard" - owner: - name: "Dashboard Tester" - email: "tester@dashboard.com" - depends_on: - - ref("model_one") - - source("seed_sources", "raw_customers") - - -""" - -my_analysis_sql = """ -select * from customers - -""" - -schema_sources2_yml = """ - -sources: - - name: seed_sources - schema: "{{ target.schema }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - - unique - - name: first_name - - name: last_name - - name: email - -exposures: - - name: proxy_for_dashboard - description: "This is for the XXX dashboard" - type: "dashboard" - owner: - name: "Dashboard Tester" - email: "tester@dashboard.com" - depends_on: - - ref("model_one") - - ref("raw_customers") - - source("seed_sources", "raw_customers") - - -""" - -model_color_sql = """ -select 'blue' as fun - -""" - -my_metric_yml = """ -metrics: - - name: new_customers - label: New Customers - model: customers - description: "The number of paid customers who are using the product" - type: simple - type_params: - measure: - name: customers - filter: "{{ Dimension('id__loves_dbt') }} is true" - +meta: - is_okr: True - tags: - - okrs - - - -""" - -env_var_schema2_yml = """ - -models: - - name: model_one - config: - materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" - data_tests: - - check_color: - column_name: fun - color: "env_var('ENV_VAR_COLOR')" - - -""" - -gsm_override_sql = """ -- custom macro -{% macro generate_schema_name(schema_name, node) %} - - {{ schema_name }}_{{ target.schema }} - -{% endmacro %} - -""" - -model_four1_sql = """ -select * from {{ ref('model_three') }} - -""" - -model_one_sql = """ -select 1 as fun - -""" - -metricflow_time_spine_sql = """ -SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day -""" - -env_var_schema3_yml = """ - -models: - - name: model_one - config: - materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" - data_tests: - - check_color: - column_name: fun - color: "env_var('ENV_VAR_COLOR')" - -exposures: - - name: proxy_for_dashboard - description: "This is for the XXX dashboard" - type: "dashboard" - owner: - name: "{{ env_var('ENV_VAR_OWNER') }}" - email: "tester@dashboard.com" - depends_on: - - ref("model_color") - - source("seed_sources", "raw_customers") - -""" - -people_semantic_models_yml = """ -version: 2 - -semantic_models: - - name: semantic_people - model: ref('people') - dimensions: - - name: favorite_color - type: categorical - - name: created_at - type: TIME - type_params: - time_granularity: day - measures: - - name: years_tenure - agg: SUM - expr: tenure - - name: people - agg: count - expr: id - entities: - - name: id - type: primary - defaults: - agg_time_dimension: created_at -""" - -env_var_metrics_yml = """ - -metrics: - - - name: number_of_people - description: Total count of people - label: "Number of people" - type: simple - type_params: - measure: people - meta: - my_meta: '{{ env_var("ENV_VAR_METRICS") }}' - - - name: collective_tenure - description: Total number of years of team experience - label: "Collective tenure" - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - -""" - -customers_sql = """ -with source as ( - - select * from {{ source('seed_sources', 'raw_customers') }} - -), - -renamed as ( - - select - id as customer_id, - first_name, - last_name, - email - - from source - -) - -select * from renamed - -""" - -model_four2_sql = """ -select fun from {{ ref('model_one') }} - -""" - -env_var_model_sql = """ -select '{{ env_var('ENV_VAR_TEST') }}' as vartest - -""" - -env_var_model_one_sql = """ -select 'blue' as fun - -""" - -custom_schema_tests2_sql = """ -{% test type_one(model) %} - - select * from ( - - select * from {{ model }} - union all - select * from {{ ref('model_b') }} - - ) as Foo - -{% endtest %} - -{% test type_two(model) %} - - {{ config(severity = "ERROR") }} - - select * from {{ model }} - -{% endtest %} - -""" - -metric_model_a_sql = """ -{% - set metric_list = [ - metric('number_of_people'), - metric('collective_tenure') - ] -%} - -{% if not execute %} - - {% set metric_names = [] %} - {% for m in metric_list %} - {% do metric_names.append(m.metric_name) %} - {% endfor %} - - -- this config does nothing, but it lets us check these values - {{ config(metric_names = metric_names) }} - -{% endif %} - - -select 1 as fun - -""" - -model_b_sql = """ -select 1 as notfun - -""" - -customers2_md = """ -{% docs customer_table %} - -LOTS of customer data - -{% enddocs %} - -""" - -custom_schema_tests1_sql = """ -{% test type_one(model) %} - - select * from ( - - select * from {{ model }} - union all - select * from {{ ref('model_b') }} - - ) as Foo - -{% endtest %} - -{% test type_two(model) %} - - {{ config(severity = "WARN") }} - - select * from {{ model }} - -{% endtest %} - -""" - -people_metrics_yml = """ - -metrics: - - - name: number_of_people - description: Total count of people - label: "Number of people" - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - - - name: collective_tenure - description: Total number of years of team experience - label: "Collective tenure" - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - -""" - -people_sql = """ -select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at -union all -select 1 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at - -""" - -orders_sql = """ -select 1 as id, 101 as user_id, 'pending' as status - -""" - -orders_downstream_sql = """ -select * from {{ ref('orders') }} - -""" - -model_a_sql = """ -select 1 as fun - -""" - -model_three_disabled_sql = """ -{{ config(materialized='table', enabled=False) }} - -with source_data as ( - - select 1 as id - union all - select null as id - -) - -select * -from source_data - -""" - -models_schema2b_yml = """ - -models: - - name: model_one - description: "The first model" - - name: model_three - description: "The third model" - columns: - - name: id - data_tests: - - not_null - -""" - -env_var_macros_yml = """ -macros: - - name: do_something - description: "This is a test macro" - meta: - some_key: "{{ env_var('ENV_VAR_SOME_KEY') }}" - - -""" - -models_schema4_yml = """ - -models: - - name: model_one - description: "The first model" - - name: model_three - description: "The third model" - config: - enabled: false - columns: - - name: id - data_tests: - - unique - -""" - -model_two_sql = """ -select 1 as notfun - -""" - -generic_test_schema_yml = """ - -models: - - name: orders - description: "Some order data" - columns: - - name: id - data_tests: - - unique - - is_odd - -""" - -customers1_md = """ -{% docs customer_table %} - -This table contains customer data - -{% enddocs %} - -""" - -model_three_modified_sql = """ -{{ config(materialized='table') }} - -with source_data as ( - - {#- This is model three #} - - select 1 as id - union all - select null as id - -) - -select * -from source_data - -""" - -macros_yml = """ -macros: - - name: do_something - description: "This is a test macro" - -""" - -test_color_sql = """ -{% test check_color(model, column_name, color) %} - - select * - from {{ model }} - where {{ column_name }} = '{{ color }}' - -{% endtest %} - -""" - -models_schema2_yml = """ - -models: - - name: model_one - description: "The first model" - - name: model_three - description: "The third model" - columns: - - name: id - data_tests: - - unique - -""" - -gsm_override2_sql = """ -- custom macro xxxx -{% macro generate_schema_name(schema_name, node) %} - - {{ schema_name }}_{{ target.schema }} - -{% endmacro %} - -""" - -models_schema3_yml = """ - -models: - - name: model_one - description: "The first model" - - name: model_three - description: "The third model" - data_tests: - - unique -macros: - - name: do_something - description: "This is a test macro" - -""" - -generic_test_sql = """ -{% test is_odd(model, column_name) %} - -with validation as ( - - select - {{ column_name }} as odd_field - - from {{ model }} - -), - -validation_errors as ( - - select - odd_field - - from validation - -- if this is true, then odd_field is actually even! - where (odd_field % 2) = 0 - -) - -select * -from validation_errors - -{% endtest %} -""" - -env_var_model_test_yml = """ -models: - - name: model_color - columns: - - name: fun - data_tests: - - unique: - enabled: "{{ env_var('ENV_VAR_ENABLED', True) }}" - -""" - -model_three_sql = """ -{{ config(materialized='table') }} - -with source_data as ( - - select 1 as id - union all - select null as id - -) - -select * -from source_data - -""" - -ref_override2_sql = """ -- Macro to override ref xxxx -{% macro ref(modelname) %} -{% do return(builtins.ref(modelname)) %} -{% endmacro %} - -""" - -models_schema1_yml = """ - -models: - - name: model_one - description: "The first model" - -""" - -macros_schema_yml = """ - - -models: - - name: model_a - data_tests: - - type_one - - type_two - -""" - -models_versions_schema_yml = """ - -models: - - name: model_one - description: "The first model" - versions: - - v: 1 - - v: 2 -""" - -models_versions_defined_in_schema_yml = """ - -models: - - name: model_one - description: "The first model" - versions: - - v: 1 - - v: 2 - defined_in: model_one_different -""" - -models_versions_updated_schema_yml = """ - -models: - - name: model_one - latest_version: 1 - description: "The first model" - versions: - - v: 1 - - v: 2 - defined_in: model_one_different -""" - -my_macro_sql = """ -{% macro do_something(foo2, bar2) %} - - select - '{{ foo2 }}' as foo2, - '{{ bar2 }}' as bar2 - -{% endmacro %} - -""" - -snapshot_sql = """ -{% snapshot orders_snapshot %} - -{{ - config( - target_schema=schema, - strategy='check', - unique_key='id', - check_cols=['status'], - ) -}} - -select * from {{ ref('orders') }} - -{% endsnapshot %} - -{% snapshot orders2_snapshot %} - -{{ - config( - target_schema=schema, - strategy='check', - unique_key='id', - check_cols=['order_date'], - ) -}} - -select * from {{ ref('orders') }} - -{% endsnapshot %} - -""" - -models_schema4b_yml = """ - -models: - - name: model_one - description: "The first model" - - name: model_three - description: "The third model" - config: - enabled: true - columns: - - name: id - data_tests: - - unique - -""" - -test_macro_sql = """ -{% macro macro_something() %} - - {% do return('macro_something') %} - -{% endmacro %} - -""" - -people_metrics2_yml = """ - -metrics: - - - name: number_of_people - description: Total count of people - label: "Number of people" - type: simple - type_params: - measure: people - meta: - my_meta: 'replaced' - - - name: collective_tenure - description: Total number of years of team experience - label: "Collective tenure" - type: simple - type_params: - measure: - name: years_tenure - filter: "{{ Dimension('id__loves_dbt') }} is true" - -""" - -generic_schema_yml = """ - -models: - - name: orders - description: "Some order data" - columns: - - name: id - data_tests: - - unique - -""" - - -groups_schema_yml_one_group = """ - -groups: - - name: test_group - owner: - name: test_group_owner - -models: - - name: orders - description: "Some order data" -""" - - -groups_schema_yml_two_groups = """ - -groups: - - name: test_group - owner: - name: test_group_owner - - name: test_group2 - owner: - name: test_group_owner2 - -models: - - name: orders - description: "Some order data" -""" - - -groups_schema_yml_two_groups_private_orders_valid_access = """ - -groups: - - name: test_group - owner: - name: test_group_owner - - name: test_group2 - owner: - name: test_group_owner2 - -models: - - name: orders - group: test_group - access: private - description: "Some order data" - - name: orders_downstream - group: test_group - description: "Some order data" -""" - -groups_schema_yml_two_groups_private_orders_invalid_access = """ - -groups: - - name: test_group - owner: - name: test_group_owner - - name: test_group2 - owner: - name: test_group_owner2 - -models: - - name: orders - group: test_group2 - access: private - description: "Some order data" - - name: orders_downstream - group: test_group - description: "Some order data" -""" - -groups_schema_yml_one_group_model_in_group2 = """ - -groups: - - name: test_group - owner: - name: test_group_owner - -models: - - name: orders - description: "Some order data" - config: - group: test_group2 -""" - -groups_schema_yml_two_groups_edited = """ - -groups: - - name: test_group - owner: - name: test_group_owner - - name: test_group2_edited - owner: - name: test_group_owner2 - -models: - - name: orders - description: "Some order data" -""" - - -snapshot2_sql = """ -- add a comment -{% snapshot orders_snapshot %} - -{{ - config( - target_schema=schema, - strategy='check', - unique_key='id', - check_cols=['status'], - ) -}} - -select * from {{ ref('orders') }} - -{% endsnapshot %} - -{% snapshot orders2_snapshot %} - -{{ - config( - target_schema=schema, - strategy='check', - unique_key='id', - check_cols=['order_date'], - ) -}} - -select * from {{ ref('orders') }} - -{% endsnapshot %} - -""" - -sources_tests2_sql = """ - -{% test every_value_is_blue(model, column_name) %} - - select * - from {{ model }} - where {{ column_name }} != 99 - -{% endtest %} - - -""" - -people_metrics3_yml = """ - -metrics: - - - name: number_of_people - description: Total count of people - label: "Number of people" - type: simple - type_params: - measure: people - meta: - my_meta: 'replaced' - -""" - -ref_override_sql = """ -- Macro to override ref -{% macro ref(modelname) %} -{% do return(builtins.ref(modelname)) %} -{% endmacro %} - -""" - -test_macro2_sql = """ -{% macro macro_something() %} - - {% do return('some_name') %} - -{% endmacro %} - -""" - -env_var_macro_sql = """ -{% macro do_something(foo2, bar2) %} - - select - '{{ foo2 }}' as foo2, - '{{ bar2 }}' as bar2 - -{% endmacro %} - -""" - -sources_tests1_sql = """ - -{% test every_value_is_blue(model, column_name) %} - - select * - from {{ model }} - where {{ column_name }} = 9999 - -{% endtest %} - - -""" diff --git a/tests/functional/partial_parsing/test_file_diff.py b/tests/functional/partial_parsing/test_file_diff.py deleted file mode 100644 index c7e34780f..000000000 --- a/tests/functional/partial_parsing/test_file_diff.py +++ /dev/null @@ -1,63 +0,0 @@ -import os - -from dbt.tests.util import run_dbt, write_artifact, write_file -import pytest - -from tests.functional.partial_parsing.fixtures import model_one_sql, model_two_sql - - -first_file_diff = { - "deleted": [], - "changed": [], - "added": [{"path": "models/model_one.sql", "content": "select 1 as fun"}], -} - - -second_file_diff = { - "deleted": [], - "changed": [], - "added": [{"path": "models/model_two.sql", "content": "select 123 as notfun"}], -} - - -class TestFileDiffPaths: - def test_file_diffs(self, project): - os.environ["DBT_PP_FILE_DIFF_TEST"] = "true" - - run_dbt(["deps"]) - run_dbt(["seed"]) - - # We start with an empty project - results = run_dbt() - - write_artifact(first_file_diff, "file_diff.json") - results = run_dbt() - assert len(results) == 1 - - write_artifact(second_file_diff, "file_diff.json") - results = run_dbt() - assert len(results) == 2 - - -class TestFileDiffs: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - def test_no_file_diffs(self, project): - # We start with a project with one model - manifest = run_dbt(["parse"]) - assert len(manifest.nodes) == 1 - - # add a model file - write_file(model_two_sql, project.project_root, "models", "model_two.sql") - - # parse without computing a file diff - manifest = run_dbt(["--partial-parse", "--no-partial-parse-file-diff", "parse"]) - assert len(manifest.nodes) == 1 - - # default behaviour - parse with computing a file diff - manifest = run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 2 diff --git a/tests/functional/partial_parsing/test_partial_parsing.py b/tests/functional/partial_parsing/test_partial_parsing.py deleted file mode 100644 index eb09dd32b..000000000 --- a/tests/functional/partial_parsing/test_partial_parsing.py +++ /dev/null @@ -1,824 +0,0 @@ -import os -import re -from unittest import mock - -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import ( - get_manifest, - rename_dir, - rm_file, - write_file, -) -from dbt.contracts.files import ParseFileType -from dbt.contracts.results import TestStatus -from dbt.plugins.manifest import ModelNodeArgs, PluginNodes -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.partial_parsing.fixtures import ( - custom_schema_tests1_sql, - custom_schema_tests2_sql, - customers_sql, - customers1_md, - customers2_md, - empty_schema_with_version_yml, - empty_schema_yml, - generic_schema_yml, - generic_test_edited_sql, - generic_test_schema_yml, - generic_test_sql, - gsm_override_sql, - gsm_override2_sql, - local_dependency__dbt_project_yml, - local_dependency__macros__dep_macro_sql, - local_dependency__models__model_to_import_sql, - local_dependency__models__schema_yml, - local_dependency__seeds__seed_csv, - macros_schema_yml, - macros_yml, - model_a_sql, - model_b_sql, - model_four1_sql, - model_four2_sql, - model_one_sql, - model_three_disabled_sql, - model_three_disabled2_sql, - model_three_modified_sql, - model_three_sql, - model_two_sql, - models_schema1_yml, - models_schema2_yml, - models_schema2b_yml, - models_schema3_yml, - models_schema4_yml, - models_schema4b_yml, - my_analysis_sql, - my_macro_sql, - my_macro2_sql, - my_test_sql, - orders_sql, - raw_customers_csv, - ref_override_sql, - ref_override2_sql, - schema_models_c_yml, - schema_sources1_yml, - schema_sources2_yml, - schema_sources3_yml, - schema_sources4_yml, - schema_sources5_yml, - snapshot_sql, - snapshot2_sql, - sources_tests1_sql, - sources_tests2_sql, - test_macro_sql, - test_macro2_sql, -) -from tests.functional.utils import ( - run_dbt, - run_dbt_and_capture, - up_one, -) - - -os.environ["DBT_PP_TEST"] = "true" - - -def normalize(path): - return os.path.normcase(os.path.normpath(path)) - - -class TestModels: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - def test_pp_models(self, project): - # initial run - # run_dbt(['clean']) - results = run_dbt(["run"]) - assert len(results) == 1 - - # add a model file - write_file(model_two_sql, project.project_root, "models", "model_two.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - # add a schema file - write_file(models_schema1_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert "model.test.model_one" in manifest.nodes - model_one_node = manifest.nodes["model.test.model_one"] - assert model_one_node.description == "The first model" - assert model_one_node.patch_path == "test://" + normalize("models/schema.yml") - - # add a model and a schema file (with a test) at the same time - write_file(models_schema2_yml, project.project_root, "models", "schema.yml") - write_file(model_three_sql, project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "test"], expect_pass=False) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - project_files = [f for f in manifest.files if f.startswith("test://")] - assert len(project_files) == 4 - model_3_file_id = "test://" + normalize("models/model_three.sql") - assert model_3_file_id in manifest.files - model_three_file = manifest.files[model_3_file_id] - assert model_three_file.parse_file_type == ParseFileType.Model - assert type(model_three_file).__name__ == "SourceFile" - model_three_node = manifest.nodes[model_three_file.nodes[0]] - schema_file_id = "test://" + normalize("models/schema.yml") - assert model_three_node.patch_path == schema_file_id - assert model_three_node.description == "The third model" - schema_file = manifest.files[schema_file_id] - assert type(schema_file).__name__ == "SchemaSourceFile" - assert len(schema_file.data_tests) == 1 - tests = schema_file.get_all_test_ids() - assert tests == ["test.test.unique_model_three_id.6776ac8160"] - unique_test_id = tests[0] - assert unique_test_id in manifest.nodes - - # modify model sql file, ensure description still there - write_file(model_three_modified_sql, project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.model_three" - assert model_id in manifest.nodes - model_three_node = manifest.nodes[model_id] - assert model_three_node.description == "The third model" - - # Change the model 3 test from unique to not_null - write_file(models_schema2b_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "test"], expect_pass=False) - manifest = get_manifest(project.project_root) - schema_file_id = "test://" + normalize("models/schema.yml") - schema_file = manifest.files[schema_file_id] - tests = schema_file.get_all_test_ids() - assert tests == ["test.test.not_null_model_three_id.3162ce0a6f"] - not_null_test_id = tests[0] - assert not_null_test_id in manifest.nodes.keys() - assert unique_test_id not in manifest.nodes.keys() - assert len(results) == 1 - - # go back to previous version of schema file, removing patch, test, and model for model three - write_file(models_schema1_yml, project.project_root, "models", "schema.yml") - rm_file(project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - # remove schema file, still have 3 models - write_file(model_three_sql, project.project_root, "models", "model_three.sql") - rm_file(project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - manifest = get_manifest(project.project_root) - schema_file_id = "test://" + normalize("models/schema.yml") - assert schema_file_id not in manifest.files - project_files = [f for f in manifest.files if f.startswith("test://")] - assert len(project_files) == 3 - - # Put schema file back and remove a model - # referred to in schema file - write_file(models_schema2_yml, project.project_root, "models", "schema.yml") - rm_file(project.project_root, "models", "model_three.sql") - with pytest.raises(CompilationError): - results = run_dbt(["--partial-parse", "--warn-error", "run"]) - - # Put model back again - write_file(model_three_sql, project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Add model four refing model three - write_file(model_four1_sql, project.project_root, "models", "model_four.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 4 - - # Remove model_three and change model_four to ref model_one - # and change schema file to remove model_three - rm_file(project.project_root, "models", "model_three.sql") - write_file(model_four2_sql, project.project_root, "models", "model_four.sql") - write_file(models_schema1_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Remove model four, put back model three, put back schema file - write_file(model_three_sql, project.project_root, "models", "model_three.sql") - write_file(models_schema2_yml, project.project_root, "models", "schema.yml") - rm_file(project.project_root, "models", "model_four.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # disable model three in the schema file - write_file(models_schema4_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - # update enabled config to be true for model three in the schema file - write_file(models_schema4b_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # disable model three in the schema file again - write_file(models_schema4_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - # remove disabled config for model three in the schema file to check it gets enabled - write_file(models_schema4b_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Add a macro - write_file(my_macro_sql, project.project_root, "macros", "my_macro.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - manifest = get_manifest(project.project_root) - macro_id = "macro.test.do_something" - assert macro_id in manifest.macros - - # Modify the macro - write_file(my_macro2_sql, project.project_root, "macros", "my_macro.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Add a macro patch - write_file(models_schema3_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Remove the macro - rm_file(project.project_root, "macros", "my_macro.sql") - with pytest.raises(CompilationError): - results = run_dbt(["--partial-parse", "--warn-error", "run"]) - - # put back macro file, got back to schema file with no macro - # add separate macro patch schema file - write_file(models_schema2_yml, project.project_root, "models", "schema.yml") - write_file(my_macro_sql, project.project_root, "macros", "my_macro.sql") - write_file(macros_yml, project.project_root, "macros", "macros.yml") - results = run_dbt(["--partial-parse", "run"]) - - # delete macro and schema file - rm_file(project.project_root, "macros", "my_macro.sql") - rm_file(project.project_root, "macros", "macros.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Add an empty schema file - write_file(empty_schema_yml, project.project_root, "models", "eschema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Add version to empty schema file - write_file(empty_schema_with_version_yml, project.project_root, "models", "eschema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # Disable model_three - write_file(model_three_disabled_sql, project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model_id = "model.test.model_three" - assert model_id in manifest.disabled - assert model_id not in manifest.nodes - - # Edit disabled model three - write_file(model_three_disabled2_sql, project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model_id = "model.test.model_three" - assert model_id in manifest.disabled - assert model_id not in manifest.nodes - - # Remove disabled from model three - write_file(model_three_sql, project.project_root, "models", "model_three.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - manifest = get_manifest(project.project_root) - model_id = "model.test.model_three" - assert model_id in manifest.nodes - assert model_id not in manifest.disabled - - -class TestSources: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - def test_pp_sources(self, project): - # initial run - write_file(raw_customers_csv, project.project_root, "seeds", "raw_customers.csv") - write_file(sources_tests1_sql, project.project_root, "macros", "tests.sql") - results = run_dbt(["run"]) - assert len(results) == 1 - - # Partial parse running 'seed' - run_dbt(["--partial-parse", "seed"]) - manifest = get_manifest(project.project_root) - seed_file_id = "test://" + normalize("seeds/raw_customers.csv") - assert seed_file_id in manifest.files - - # Add another seed file - write_file(raw_customers_csv, project.project_root, "seeds", "more_customers.csv") - run_dbt(["--partial-parse", "run"]) - seed_file_id = "test://" + normalize("seeds/more_customers.csv") - manifest = get_manifest(project.project_root) - assert seed_file_id in manifest.files - seed_id = "seed.test.more_customers" - assert seed_id in manifest.nodes - - # Remove seed file and add a schema files with a source referring to raw_customers - rm_file(project.project_root, "seeds", "more_customers.csv") - write_file(schema_sources1_yml, project.project_root, "models", "sources.yml") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - assert len(manifest.sources) == 1 - file_id = "test://" + normalize("models/sources.yml") - assert file_id in manifest.files - - # add a model referring to raw_customers source - write_file(customers_sql, project.project_root, "models", "customers.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - # remove sources schema file - rm_file(project.project_root, "models", "sources.yml") - with pytest.raises(CompilationError): - results = run_dbt(["--partial-parse", "run"]) - - # put back sources and add an exposures file - write_file(schema_sources2_yml, project.project_root, "models", "sources.yml") - results = run_dbt(["--partial-parse", "run"]) - - # remove seed referenced in exposures file - rm_file(project.project_root, "seeds", "raw_customers.csv") - with pytest.raises(CompilationError): - results = run_dbt(["--partial-parse", "run"]) - - # put back seed and remove depends_on from exposure - write_file(raw_customers_csv, project.project_root, "seeds", "raw_customers.csv") - write_file(schema_sources3_yml, project.project_root, "models", "sources.yml") - results = run_dbt(["--partial-parse", "run"]) - - # Add seed config with test to schema.yml, remove exposure - write_file(schema_sources4_yml, project.project_root, "models", "sources.yml") - results = run_dbt(["--partial-parse", "run"]) - - # Change seed name to wrong name - write_file(schema_sources5_yml, project.project_root, "models", "sources.yml") - with pytest.raises(CompilationError): - results = run_dbt(["--partial-parse", "--warn-error", "run"]) - - # Put back seed name to right name - write_file(schema_sources4_yml, project.project_root, "models", "sources.yml") - results = run_dbt(["--partial-parse", "run"]) - - # Add docs file customers.md - write_file(customers1_md, project.project_root, "models", "customers.md") - results = run_dbt(["--partial-parse", "run"]) - - # Change docs file customers.md - write_file(customers2_md, project.project_root, "models", "customers.md") - results = run_dbt(["--partial-parse", "run"]) - - # Delete docs file - rm_file(project.project_root, "models", "customers.md") - results = run_dbt(["--partial-parse", "run"]) - - # Add a data test - write_file(test_macro_sql, project.project_root, "macros", "test-macro.sql") - write_file(my_test_sql, project.project_root, "tests", "my_test.sql") - results = run_dbt(["--partial-parse", "test"]) - manifest = get_manifest(project.project_root) - assert len(manifest.nodes) == 9 - test_id = "test.test.my_test" - assert test_id in manifest.nodes - - # Change macro that data test depends on - write_file(test_macro2_sql, project.project_root, "macros", "test-macro.sql") - results = run_dbt(["--partial-parse", "test"]) - manifest = get_manifest(project.project_root) - - # Add an analysis - write_file(my_analysis_sql, project.project_root, "analyses", "my_analysis.sql") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - - # Remove data test - rm_file(project.project_root, "tests", "my_test.sql") - results = run_dbt(["--partial-parse", "test"]) - manifest = get_manifest(project.project_root) - assert len(manifest.nodes) == 9 - - # Remove analysis - rm_file(project.project_root, "analyses", "my_analysis.sql") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - assert len(manifest.nodes) == 8 - - # Change source test - write_file(sources_tests2_sql, project.project_root, "macros", "tests.sql") - results = run_dbt(["--partial-parse", "run"]) - - -class TestPartialParsingDependency: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_dependency_files = { - "dbt_project.yml": local_dependency__dbt_project_yml, - "models": { - "schema.yml": local_dependency__models__schema_yml, - "model_to_import.sql": local_dependency__models__model_to_import_sql, - }, - "macros": {"dep_macro.sql": local_dependency__macros__dep_macro_sql}, - "seeds": {"seed.csv": local_dependency__seeds__seed_csv}, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - def test_parsing_with_dependency(self, project): - run_dbt(["clean"]) - run_dbt(["deps"]) - run_dbt(["seed"]) - run_dbt(["run"]) - - # Add a source override - write_file(schema_models_c_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert len(manifest.sources) == 1 - source_id = "source.local_dep.seed_source.seed" - assert source_id in manifest.sources - # We have 1 root model, 1 local_dep model, 1 local_dep seed, 1 local_dep source test, 2 root source tests - assert len(manifest.nodes) == 5 - test_id = "test.local_dep.source_unique_seed_source_seed_id.afa94935ed" - assert test_id in manifest.nodes - - # Remove a source override - rm_file(project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - assert len(manifest.sources) == 1 - - -class TestNestedMacros: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": model_a_sql, - "model_b.sql": model_b_sql, - "schema.yml": macros_schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "custom_schema_tests.sql": custom_schema_tests1_sql, - } - - def test_nested_macros(self, project): - results = run_dbt() - assert len(results) == 2 - manifest = get_manifest(project.project_root) - macro_child_map = manifest.build_macro_child_map() - macro_unique_id = "macro.test.test_type_two" - assert macro_unique_id in macro_child_map - - results = run_dbt(["test"], expect_pass=False) - results = sorted(results, key=lambda r: r.node.name) - assert len(results) == 2 - # type_one_model_a_ - assert results[0].status == TestStatus.Fail - assert re.search(r"union all", results[0].node.compiled_code) - # type_two_model_a_ - assert results[1].status == TestStatus.Warn - assert results[1].node.config.severity == "WARN" - - write_file( - custom_schema_tests2_sql, project.project_root, "macros", "custom_schema_tests.sql" - ) - results = run_dbt(["--partial-parse", "test"], expect_pass=False) - manifest = get_manifest(project.project_root) - test_node_id = "test.test.type_two_model_a_.842bc6c2a7" - assert test_node_id in manifest.nodes - results = sorted(results, key=lambda r: r.node.name) - assert len(results) == 2 - # type_two_model_a_ - assert results[1].status == TestStatus.Fail - assert results[1].node.config.severity == "ERROR" - - -class TestSkipMacros: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - "eschema.yml": empty_schema_yml, - } - - def test_skip_macros(self, project): - # initial run so we have a msgpack file - # includes empty_schema file for bug #4850 - results = run_dbt() - - # add a new ref override macro - write_file(ref_override_sql, project.project_root, "macros", "ref_override.sql") - results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) - assert "Starting full parse." in log_output - - # modify a ref override macro - write_file(ref_override2_sql, project.project_root, "macros", "ref_override.sql") - results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) - assert "Starting full parse." in log_output - - # remove a ref override macro - rm_file(project.project_root, "macros", "ref_override.sql") - results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) - assert "Starting full parse." in log_output - - # custom generate_schema_name macro - write_file(gsm_override_sql, project.project_root, "macros", "gsm_override.sql") - results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) - assert "Starting full parse." in log_output - - # change generate_schema_name macro - write_file(gsm_override2_sql, project.project_root, "macros", "gsm_override.sql") - results, log_output = run_dbt_and_capture(["--partial-parse", "run"]) - assert "Starting full parse." in log_output - - -class TestSnapshots: - @pytest.fixture(scope="class") - def models(self): - return { - "orders.sql": orders_sql, - } - - def test_pp_snapshots(self, project): - # initial run - results = run_dbt() - assert len(results) == 1 - - # add snapshot - write_file(snapshot_sql, project.project_root, "snapshots", "snapshot.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - snapshot_id = "snapshot.test.orders_snapshot" - assert snapshot_id in manifest.nodes - snapshot2_id = "snapshot.test.orders2_snapshot" - assert snapshot2_id in manifest.nodes - - # run snapshot - results = run_dbt(["--partial-parse", "snapshot"]) - assert len(results) == 2 - - # modify snapshot - write_file(snapshot2_sql, project.project_root, "snapshots", "snapshot.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 1 - - # delete snapshot - rm_file(project.project_root, "snapshots", "snapshot.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 1 - - -class TestTests: - @pytest.fixture(scope="class") - def models(self): - return { - "orders.sql": orders_sql, - "schema.yml": generic_schema_yml, - } - - @pytest.fixture(scope="class") - def tests(self): - # Make sure "generic" directory is created - return {"generic": {"readme.md": ""}} - - def test_pp_generic_tests(self, project): - # initial run - results = run_dbt() - assert len(results) == 1 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "test.test.unique_orders_id.1360ecc70e"] - assert expected_nodes == list(manifest.nodes.keys()) - - # add generic test in test-path - write_file(generic_test_sql, project.project_root, "tests", "generic", "generic_test.sql") - write_file(generic_test_schema_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - test_id = "test.test.is_odd_orders_id.82834fdc5b" - assert test_id in manifest.nodes - expected_nodes = [ - "model.test.orders", - "test.test.unique_orders_id.1360ecc70e", - "test.test.is_odd_orders_id.82834fdc5b", - ] - assert expected_nodes == list(manifest.nodes.keys()) - - # edit generic test in test-path - write_file( - generic_test_edited_sql, project.project_root, "tests", "generic", "generic_test.sql" - ) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - test_id = "test.test.is_odd_orders_id.82834fdc5b" - assert test_id in manifest.nodes - expected_nodes = [ - "model.test.orders", - "test.test.unique_orders_id.1360ecc70e", - "test.test.is_odd_orders_id.82834fdc5b", - ] - assert expected_nodes == list(manifest.nodes.keys()) - - -class TestExternalModels: - @pytest.fixture(scope="class") - def external_model_node(self): - return ModelNodeArgs( - name="external_model", - package_name="external", - identifier="test_identifier", - schema="test_schema", - ) - - @pytest.fixture(scope="class") - def external_model_node_versioned(self): - return ModelNodeArgs( - name="external_model_versioned", - package_name="external", - identifier="test_identifier_v1", - schema="test_schema", - version=1, - ) - - @pytest.fixture(scope="class") - def external_model_node_depends_on(self): - return ModelNodeArgs( - name="external_model_depends_on", - package_name="external", - identifier="test_identifier_depends_on", - schema="test_schema", - depends_on_nodes=["model.external.external_model_depends_on_parent"], - ) - - @pytest.fixture(scope="class") - def external_model_node_depends_on_parent(self): - return ModelNodeArgs( - name="external_model_depends_on_parent", - package_name="external", - identifier="test_identifier_depends_on_parent", - schema="test_schema", - ) - - @pytest.fixture(scope="class") - def models(self): - return {"model_one.sql": model_one_sql} - - @mock.patch("dbt.plugins.get_plugin_manager") - def test_pp_external_models( - self, - get_plugin_manager, - project, - external_model_node, - external_model_node_versioned, - external_model_node_depends_on, - external_model_node_depends_on_parent, - ): - # initial plugin - one external model - external_nodes = PluginNodes() - external_nodes.add_model(external_model_node) - get_plugin_manager.return_value.get_nodes.return_value = external_nodes - - # initial parse - manifest = run_dbt(["parse"]) - assert len(manifest.nodes) == 2 - assert set(manifest.nodes.keys()) == { - "model.external.external_model", - "model.test.model_one", - } - assert len(manifest.external_node_unique_ids) == 1 - assert manifest.external_node_unique_ids == ["model.external.external_model"] - - # add a model file - write_file(model_two_sql, project.project_root, "models", "model_two.sql") - manifest = run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 3 - - # add an external model - external_nodes.add_model(external_model_node_versioned) - manifest = run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 4 - assert len(manifest.external_node_unique_ids) == 2 - - # add a model file that depends on external model - write_file( - "SELECT * FROM {{ref('external', 'external_model')}}", - project.project_root, - "models", - "model_depends_on_external.sql", - ) - manifest = run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 5 - assert len(manifest.external_node_unique_ids) == 2 - - # remove a model file that depends on external model - rm_file(project.project_root, "models", "model_depends_on_external.sql") - manifest = run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 4 - - # add an external node with depends on - external_nodes.add_model(external_model_node_depends_on) - external_nodes.add_model(external_model_node_depends_on_parent) - manifest = run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 6 - assert len(manifest.external_node_unique_ids) == 4 - - # skip files parsing - ensure no issues - run_dbt(["--partial-parse", "parse"]) - assert len(manifest.nodes) == 6 - assert len(manifest.external_node_unique_ids) == 4 - - -class TestPortablePartialParsing: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - @pytest.fixture(scope="class") - def local_dependency_files(self): - return { - "dbt_project.yml": local_dependency__dbt_project_yml, - "models": { - "schema.yml": local_dependency__models__schema_yml, - "model_to_import.sql": local_dependency__models__model_to_import_sql, - }, - "macros": {"dep_macro.sql": local_dependency__macros__dep_macro_sql}, - "seeds": {"seed.csv": local_dependency__seeds__seed_csv}, - } - - def rename_project_root(self, project, new_project_root): - with up_one(new_project_root): - rename_dir(project.project_root, new_project_root) - project.project_root = new_project_root - # flags.project_dir is set during the project test fixture, and is persisted across run_dbt calls, - # so it needs to be reset between invocations - # flags.set_from_args(Namespace(PROJECT_DIR=new_project_root), None) - - @pytest.fixture(scope="class", autouse=True) - def initial_run_and_rename_project_dir(self, project, local_dependency_files): - initial_project_root = project.project_root - renamed_project_root = os.path.join(project.project_root.dirname, "renamed_project_dir") - - write_project_files(project.project_root, "local_dependency", local_dependency_files) - - # initial run - run_dbt(["deps"]) - assert len(run_dbt(["seed"])) == 1 - assert len(run_dbt(["run"])) == 2 - - self.rename_project_root(project, renamed_project_root) - yield - self.rename_project_root(project, initial_project_root) - - def test_pp_renamed_project_dir_unchanged_project_contents(self, project): - # partial parse same project in new absolute dir location, using partial_parse.msgpack created in previous dir - run_dbt(["deps"]) - assert len(run_dbt(["--partial-parse", "seed"])) == 1 - assert len(run_dbt(["--partial-parse", "run"])) == 2 - - def test_pp_renamed_project_dir_changed_project_contents(self, project): - write_file(model_two_sql, project.project_root, "models", "model_two.sql") - - # partial parse changed project in new absolute dir location, using partial_parse.msgpack created in previous dir - run_dbt(["deps"]) - len(run_dbt(["--partial-parse", "seed"])) == 1 - len(run_dbt(["--partial-parse", "run"])) == 3 diff --git a/tests/functional/partial_parsing/test_pp_disabled_config.py b/tests/functional/partial_parsing/test_pp_disabled_config.py deleted file mode 100644 index 8a4ece9d2..000000000 --- a/tests/functional/partial_parsing/test_pp_disabled_config.py +++ /dev/null @@ -1,224 +0,0 @@ -from dbt.tests.util import get_manifest, run_dbt, write_file -import pytest - - -model_one_sql = """ -select 1 as fun -""" - -metricflow_time_spine_sql = """ -SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day -""" - -schema1_yml = """ -version: 2 - -models: - - name: model_one - -semantic_models: - - name: semantic_people - model: ref('model_one') - dimensions: - - name: created_at - type: TIME - type_params: - time_granularity: day - measures: - - name: people - agg: count - expr: fun - entities: - - name: fun - type: primary - defaults: - agg_time_dimension: created_at - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - -exposures: - - name: proxy_for_dashboard - description: "My Exposure" - type: "dashboard" - owner: - name: "Dashboard Tester" - email: "tester@dashboard.com" - depends_on: - - ref("model_one") -""" - -schema2_yml = """ -version: 2 - -models: - - name: model_one - -semantic_models: - - name: semantic_people - model: ref('model_one') - dimensions: - - name: created_at - type: TIME - type_params: - time_granularity: day - measures: - - name: people - agg: count - expr: fun - entities: - - name: fun - type: primary - defaults: - agg_time_dimension: created_at - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - config: - enabled: false - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' - -exposures: - - name: proxy_for_dashboard - description: "My Exposure" - config: - enabled: false - type: "dashboard" - owner: - name: "Dashboard Tester" - email: "tester@dashboard.com" - depends_on: - - ref("model_one") -""" - -schema3_yml = """ -version: 2 - -models: - - name: model_one - -semantic_models: - - name: semantic_people - model: ref('model_one') - dimensions: - - name: created_at - type: TIME - type_params: - time_granularity: day - measures: - - name: people - agg: count - expr: fun - entities: - - name: fun - type: primary - defaults: - agg_time_dimension: created_at - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: people - meta: - my_meta: 'testing' -""" - -schema4_yml = """ -version: 2 - -models: - - name: model_one - -exposures: - - name: proxy_for_dashboard - description: "My Exposure" - config: - enabled: false - type: "dashboard" - owner: - name: "Dashboard Tester" - email: "tester@dashboard.com" - depends_on: - - ref("model_one") -""" - - -class TestDisabled: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - "schema.yml": schema1_yml, - } - - def test_pp_disabled(self, project): - expected_exposure = "exposure.test.proxy_for_dashboard" - expected_metric = "metric.test.number_of_people" - - run_dbt(["seed"]) - manifest = run_dbt(["parse"]) - - assert expected_exposure in manifest.exposures - assert expected_metric in manifest.metrics - assert expected_exposure not in manifest.disabled - assert expected_metric not in manifest.disabled - - # Update schema file with disabled metric and exposure - write_file(schema2_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert expected_exposure not in manifest.exposures - assert expected_metric not in manifest.metrics - assert expected_exposure in manifest.disabled - assert expected_metric in manifest.disabled - - # Update schema file with enabled metric and exposure - write_file(schema1_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert expected_exposure in manifest.exposures - assert expected_metric in manifest.metrics - assert expected_exposure not in manifest.disabled - assert expected_metric not in manifest.disabled - - # Update schema file - remove exposure, enable metric - write_file(schema3_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert expected_exposure not in manifest.exposures - assert expected_metric in manifest.metrics - assert expected_exposure not in manifest.disabled - assert expected_metric not in manifest.disabled - - # Update schema file - add back exposure, remove metric - write_file(schema4_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert expected_exposure not in manifest.exposures - assert expected_metric not in manifest.metrics - assert expected_exposure in manifest.disabled - assert expected_metric not in manifest.disabled diff --git a/tests/functional/partial_parsing/test_pp_docs.py b/tests/functional/partial_parsing/test_pp_docs.py deleted file mode 100644 index 5df08d4d1..000000000 --- a/tests/functional/partial_parsing/test_pp_docs.py +++ /dev/null @@ -1,257 +0,0 @@ -from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file -import pytest - - -model_one_sql = """ -select 1 as fun -""" - -raw_customers_csv = """id,first_name,last_name,email -1,Michael,Perez,mperez0@chronoengine.com -2,Shawn,Mccoy,smccoy1@reddit.com -3,Kathleen,Payne,kpayne2@cargocollective.com -4,Jimmy,Cooper,jcooper3@cargocollective.com -5,Katherine,Rice,krice4@typepad.com -6,Sarah,Ryan,sryan5@gnu.org -7,Martin,Mcdonald,mmcdonald6@opera.com -8,Frank,Robinson,frobinson7@wunderground.com -9,Jennifer,Franklin,jfranklin8@mail.ru -10,Henry,Welch,hwelch9@list-manage.com -""" - -my_macro_sql = """ -{% macro my_macro(something) %} - - select - '{{ something }}' as something2 - -{% endmacro %} - -""" - -customers1_md = """ -{% docs customer_table %} - -This table contains customer data - -{% enddocs %} -""" - -customers2_md = """ -{% docs customer_table %} - -LOTS of customer data - -{% enddocs %} - -""" - -schema1_yml = """ -version: 2 - -models: - - name: model_one - description: "{{ doc('customer_table') }}" -""" - -schema2_yml = """ -version: 2 - -models: - - name: model_one - description: "{{ doc('customer_table') }}" - -macros: - - name: my_macro - description: "{{ doc('customer_table') }}" - -sources: - - name: seed_sources - description: "{{ doc('customer_table') }}" - schema: "{{ target.schema }}" - tables: - - name: raw_customers - columns: - - name: id - data_tests: - - not_null: - severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - - unique - - name: first_name - - name: last_name - - name: email - -exposures: - - name: proxy_for_dashboard - description: "{{ doc('customer_table') }}" - type: "dashboard" - owner: - name: "Dashboard Tester" - email: "tester@dashboard.com" - depends_on: - - ref("model_one") - - ref("raw_customers") - - source("seed_sources", "raw_customers") -""" - - -class TestDocs: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return { - "raw_customers.csv": raw_customers_csv, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "my_macro.sql": my_macro_sql, - } - - def test_pp_docs(self, project): - run_dbt(["seed"]) - results = run_dbt(["run"]) - assert len(results) == 1 - - # Add docs file customers.md - write_file(customers1_md, project.project_root, "models", "customers.md") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - assert len(manifest.docs) == 2 - - # Add schema file with 'docs' description - write_file(schema1_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - assert len(manifest.docs) == 2 - doc_id = "doc.test.customer_table" - assert doc_id in manifest.docs - doc = manifest.docs[doc_id] - doc_file_id = doc.file_id - assert doc_file_id in manifest.files - source_file = manifest.files[doc_file_id] - assert len(source_file.nodes) == 1 - model_one_id = "model.test.model_one" - assert model_one_id in source_file.nodes - model_node = manifest.nodes[model_one_id] - assert model_node.description == "This table contains customer data" - - # Update the doc file - write_file(customers2_md, project.project_root, "models", "customers.md") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - assert len(manifest.docs) == 2 - assert model_one_id in manifest.nodes - model_node = manifest.nodes[model_one_id] - assert "LOTS" in model_node.description - - # Add a macro patch, source and exposure with doc - write_file(schema2_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - doc_file = manifest.files[doc_file_id] - expected_nodes = [ - "model.test.model_one", - "source.test.seed_sources.raw_customers", - "macro.test.my_macro", - "exposure.test.proxy_for_dashboard", - ] - assert expected_nodes == doc_file.nodes - source_id = "source.test.seed_sources.raw_customers" - assert manifest.sources[source_id].source_description == "LOTS of customer data" - macro_id = "macro.test.my_macro" - assert manifest.macros[macro_id].description == "LOTS of customer data" - exposure_id = "exposure.test.proxy_for_dashboard" - assert manifest.exposures[exposure_id].description == "LOTS of customer data" - - # update the doc file again - write_file(customers1_md, project.project_root, "models", "customers.md") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - source_file = manifest.files[doc_file_id] - assert model_one_id in source_file.nodes - model_node = manifest.nodes[model_one_id] - assert model_node.description == "This table contains customer data" - assert ( - manifest.sources[source_id].source_description == "This table contains customer data" - ) - assert manifest.macros[macro_id].description == "This table contains customer data" - assert manifest.exposures[exposure_id].description == "This table contains customer data" - - # check that _lock is working - with manifest._lock: - assert manifest._lock - - -my_model_yml = """ -version: 2 -models: - - name: my_model - columns: - - name: id - description: "{{ doc('whatever') }}" -""" - -my_model_no_description_yml = """ -version: 2 -models: - - name: my_model - columns: - - name: id -""" - -my_model_md = """ -{% docs whatever %} - cool stuff -{% enddocs %} -""" - - -class TestDocsRemoveReplace: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": "select 1 as id", - "my_model.yml": my_model_yml, - "my_model.md": my_model_md, - } - - def test_remove_replace(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - doc_id = "doc.test.whatever" - assert doc_id in manifest.docs - doc = manifest.docs[doc_id] - doc_file = manifest.files[doc.file_id] - - model_id = "model.test.my_model" - assert model_id in manifest.nodes - - assert doc_file.nodes == [model_id] - - model = manifest.nodes[model_id] - model_file_id = model.file_id - assert model_file_id in manifest.files - - # remove the doc file - rm_file(project.project_root, "models", "my_model.md") - # remove description from schema file - write_file(my_model_no_description_yml, project.project_root, "models", "my_model.yml") - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert doc_id not in manifest.docs - # The bug was that the file still existed in manifest.files - assert doc.file_id not in manifest.files - - # put back the doc file - write_file(my_model_md, project.project_root, "models", "my_model.md") - # put back the description in the schema file - write_file(my_model_yml, project.project_root, "models", "my_model.yml") - run_dbt(["parse"]) diff --git a/tests/functional/partial_parsing/test_pp_groups.py b/tests/functional/partial_parsing/test_pp_groups.py deleted file mode 100644 index f75776832..000000000 --- a/tests/functional/partial_parsing/test_pp_groups.py +++ /dev/null @@ -1,155 +0,0 @@ -from dbt.exceptions import ParsingError -from dbt.tests.util import get_manifest, run_dbt, write_file -import pytest - -from tests.functional.partial_parsing.fixtures import ( - groups_schema_yml_one_group, - groups_schema_yml_one_group_model_in_group2, - groups_schema_yml_two_groups, - groups_schema_yml_two_groups_edited, - groups_schema_yml_two_groups_private_orders_invalid_access, - groups_schema_yml_two_groups_private_orders_valid_access, - orders_downstream_sql, - orders_sql, -) - - -class TestGroups: - @pytest.fixture(scope="class") - def models(self): - return { - "orders.sql": orders_sql, - "orders_downstream.sql": orders_downstream_sql, - "schema.yml": groups_schema_yml_one_group, - } - - def test_pp_groups(self, project): - # initial run - results = run_dbt() - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) - - # add group to schema - write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group", "group.test.test_group2"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) - - # edit group in schema - write_file( - groups_schema_yml_two_groups_edited, project.project_root, "models", "schema.yml" - ) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group", "group.test.test_group2_edited"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) - - # delete group in schema - write_file(groups_schema_yml_one_group, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) - - # add back second group - write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - # remove second group with model still configured to second group - write_file( - groups_schema_yml_one_group_model_in_group2, - project.project_root, - "models", - "schema.yml", - ) - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) - - # add back second group, make orders private with valid ref - write_file( - groups_schema_yml_two_groups_private_orders_valid_access, - project.project_root, - "models", - "schema.yml", - ) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - - write_file( - groups_schema_yml_two_groups_private_orders_invalid_access, - project.project_root, - "models", - "schema.yml", - ) - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) - - -my_model_c = """ -select * from {{ ref("my_model_a") }} union all -select * from {{ ref("my_model_b") }} -""" - -models_yml = """ -models: - - name: my_model_a - - name: my_model_b - - name: my_model_c -""" - -models_and_groups_yml = """ -groups: - - name: sales_analytics - owner: - name: Sales Analytics - email: sales@jaffleshop.com - -models: - - name: my_model_a - access: private - group: sales_analytics - - name: my_model_b - access: private - group: sales_analytics - - name: my_model_c - access: private - group: sales_analytics -""" - - -class TestAddingModelsToNewGroups: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model_a.sql": "select 1 as id", - "my_model_b.sql": "select 2 as id", - "my_model_c.sql": my_model_c, - "models.yml": models_yml, - } - - def test_adding_models_to_new_groups(self, project): - run_dbt(["compile"]) - # This tests that the correct patch is added to my_model_c. The bug - # was that it was using the old patch, so model_c didn't have the - # correct group and access. - write_file(models_and_groups_yml, project.project_root, "models", "models.yml") - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - model_c_node = manifest.nodes["model.test.my_model_c"] - assert model_c_node.group == "sales_analytics" - assert model_c_node.access == "private" diff --git a/tests/functional/partial_parsing/test_pp_metrics.py b/tests/functional/partial_parsing/test_pp_metrics.py deleted file mode 100644 index bc6884519..000000000 --- a/tests/functional/partial_parsing/test_pp_metrics.py +++ /dev/null @@ -1,85 +0,0 @@ -from dbt.tests.util import get_manifest, run_dbt, write_file -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.partial_parsing.fixtures import ( - metric_model_a_sql, - metricflow_time_spine_sql, - people_metrics_yml, - people_metrics2_yml, - people_metrics3_yml, - people_semantic_models_yml, - people_sql, -) - - -class TestMetrics: - @pytest.fixture(scope="class") - def models(self): - return { - "people.sql": people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - } - - def test_metrics(self, project): - # initial run - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert len(manifest.nodes) == 2 - - # Add metrics yaml file (and necessary semantic models yaml) - write_file( - people_semantic_models_yml, - project.project_root, - "models", - "people_semantic_models.yml", - ) - write_file(people_metrics_yml, project.project_root, "models", "people_metrics.yml") - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - assert len(manifest.metrics) == 2 - metric_people_id = "metric.test.number_of_people" - metric_people = manifest.metrics[metric_people_id] - expected_meta = {"my_meta": "testing"} - assert metric_people.meta == expected_meta - - # TODO: Bring back when we resolving `depends_on_nodes` - # metric_tenure_id = "metric.test.collective_tenure" - # metric_tenure = manifest.metrics[metric_tenure_id] - # assert metric_people.refs == [RefArgs(name="people")] - # assert metric_tenure.refs == [RefArgs(name="people")] - # expected_depends_on_nodes = ["model.test.people"] - # assert metric_people.depends_on.nodes == expected_depends_on_nodes - - # Change metrics yaml files - write_file(people_metrics2_yml, project.project_root, "models", "people_metrics.yml") - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - metric_people = manifest.metrics[metric_people_id] - expected_meta = {"my_meta": "replaced"} - assert metric_people.meta == expected_meta - # TODO: Bring back when we resolving `depends_on_nodes` - # expected_depends_on_nodes = ["model.test.people"] - # assert metric_people.depends_on.nodes == expected_depends_on_nodes - - # Add model referring to metric - write_file(metric_model_a_sql, project.project_root, "models", "metric_model_a.sql") - results = run_dbt(["run"]) - manifest = get_manifest(project.project_root) - # TODO: Bring back when we resolving `depends_on_nodes` - # model_a = manifest.nodes["model.test.metric_model_a"] - # expected_depends_on_nodes = [ - # "metric.test.number_of_people", - # "metric.test.collective_tenure", - # ] - # assert model_a.depends_on.nodes == expected_depends_on_nodes - - # Then delete a metric - write_file(people_metrics3_yml, project.project_root, "models", "people_metrics.yml") - with pytest.raises(CompilationError): - # We use "parse" here and not "run" because we're checking that the CompilationError - # occurs at parse time, not compilation - results = run_dbt(["parse"]) diff --git a/tests/functional/partial_parsing/test_pp_vars.py b/tests/functional/partial_parsing/test_pp_vars.py deleted file mode 100644 index c903cdeab..000000000 --- a/tests/functional/partial_parsing/test_pp_vars.py +++ /dev/null @@ -1,398 +0,0 @@ -import os -from pathlib import Path - -from dbt.adapters.exceptions import FailedToConnectError -from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import ParsingError -from dbt.tests.util import get_manifest, write_file -import pytest - -from tests.functional.partial_parsing.fixtures import ( - env_var_macro_sql, - env_var_macros_yml, - env_var_metrics_yml, - env_var_model_one_sql, - env_var_model_sql, - env_var_model_test_yml, - env_var_schema_yml, - env_var_schema2_yml, - env_var_schema3_yml, - env_var_sources_yml, - metricflow_time_spine_sql, - model_color_sql, - model_one_sql, - people_semantic_models_yml, - people_sql, - raw_customers_csv, - test_color_sql, -) -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -os.environ["DBT_PP_TEST"] = "true" - - -class TestEnvVars: - @pytest.fixture(scope="class") - def models(self): - return { - "model_color.sql": model_color_sql, - } - - def test_env_vars_models(self, project): - # initial run - results = run_dbt(["run"]) - assert len(results) == 1 - - # copy a file with an env_var call without an env_var - write_file(env_var_model_sql, project.project_root, "models", "env_var_model.sql") - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) - - # set the env var - os.environ["ENV_VAR_TEST"] = "TestingEnvVars" - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_env_vars = {"ENV_VAR_TEST": "TestingEnvVars"} - assert expected_env_vars == manifest.env_vars - model_id = "model.test.env_var_model" - model = manifest.nodes[model_id] - model_created_at = model.created_at - - # change the env var - os.environ["ENV_VAR_TEST"] = "second" - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_env_vars = {"ENV_VAR_TEST": "second"} - assert expected_env_vars == manifest.env_vars - assert model_created_at != manifest.nodes[model_id].created_at - - # set an env_var in a schema file - write_file(env_var_schema_yml, project.project_root, "models", "schema.yml") - write_file(env_var_model_one_sql, project.project_root, "models", "model_one.sql") - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) - - # actually set the env_var - os.environ["TEST_SCHEMA_VAR"] = "view" - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - expected_env_vars = {"ENV_VAR_TEST": "second", "TEST_SCHEMA_VAR": "view"} - assert expected_env_vars == manifest.env_vars - - # env vars in a source - os.environ["ENV_VAR_DATABASE"] = "dbt" - os.environ["ENV_VAR_SEVERITY"] = "warn" - write_file(raw_customers_csv, project.project_root, "seeds", "raw_customers.csv") - write_file(env_var_sources_yml, project.project_root, "models", "sources.yml") - run_dbt(["--partial-parse", "seed"]) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - manifest = get_manifest(project.project_root) - expected_env_vars = { - "ENV_VAR_TEST": "second", - "TEST_SCHEMA_VAR": "view", - "ENV_VAR_DATABASE": "dbt", - "ENV_VAR_SEVERITY": "warn", - } - assert expected_env_vars == manifest.env_vars - assert len(manifest.sources) == 1 - source_id = "source.test.seed_sources.raw_customers" - source = manifest.sources[source_id] - assert source.database == "dbt" - schema_file = manifest.files[source.file_id] - test_id = "test.test.source_not_null_seed_sources_raw_customers_id.e39ee7bf0d" - test_node = manifest.nodes[test_id] - assert test_node.config.severity == "WARN" - - # Change severity env var - os.environ["ENV_VAR_SEVERITY"] = "error" - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - expected_env_vars = { - "ENV_VAR_TEST": "second", - "TEST_SCHEMA_VAR": "view", - "ENV_VAR_DATABASE": "dbt", - "ENV_VAR_SEVERITY": "error", - } - assert expected_env_vars == manifest.env_vars - source_id = "source.test.seed_sources.raw_customers" - source = manifest.sources[source_id] - schema_file = manifest.files[source.file_id] - expected_schema_file_env_vars = { - "sources": {"seed_sources": ["ENV_VAR_DATABASE", "ENV_VAR_SEVERITY"]} - } - assert expected_schema_file_env_vars == schema_file.env_vars - test_node = manifest.nodes[test_id] - assert test_node.config.severity == "ERROR" - - # Change database env var - os.environ["ENV_VAR_DATABASE"] = "test_dbt" - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - expected_env_vars = { - "ENV_VAR_TEST": "second", - "TEST_SCHEMA_VAR": "view", - "ENV_VAR_DATABASE": "test_dbt", - "ENV_VAR_SEVERITY": "error", - } - assert expected_env_vars == manifest.env_vars - source = manifest.sources[source_id] - assert source.database == "test_dbt" - - # Delete database env var - del os.environ["ENV_VAR_DATABASE"] - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) - os.environ["ENV_VAR_DATABASE"] = "test_dbt" - - # Add generic test with test kwarg that's rendered late (no curly brackets) - os.environ["ENV_VAR_DATABASE"] = "dbt" - write_file(test_color_sql, project.project_root, "macros", "test_color.sql") - results = run_dbt(["--partial-parse", "run"]) - # Add source test using test_color and an env_var for color - write_file(env_var_schema2_yml, project.project_root, "models/schema.yml") - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) - os.environ["ENV_VAR_COLOR"] = "green" - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - test_color_id = "test.test.check_color_model_one_env_var_ENV_VAR_COLOR___fun.89638de387" - test_node = manifest.nodes[test_color_id] - # kwarg was rendered but not changed (it will be rendered again when compiled) - assert test_node.test_metadata.kwargs["color"] == "env_var('ENV_VAR_COLOR')" - results = run_dbt(["--partial-parse", "test"]) - - # Add an exposure with an env_var - os.environ["ENV_VAR_OWNER"] = "John Doe" - write_file(env_var_schema3_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - expected_env_vars = { - "ENV_VAR_TEST": "second", - "TEST_SCHEMA_VAR": "view", - "ENV_VAR_DATABASE": "dbt", - "ENV_VAR_SEVERITY": "error", - "ENV_VAR_COLOR": "green", - "ENV_VAR_OWNER": "John Doe", - } - assert expected_env_vars == manifest.env_vars - exposure = list(manifest.exposures.values())[0] - schema_file = manifest.files[exposure.file_id] - expected_sf_env_vars = { - "models": {"model_one": ["TEST_SCHEMA_VAR", "ENV_VAR_COLOR"]}, - "exposures": {"proxy_for_dashboard": ["ENV_VAR_OWNER"]}, - } - assert expected_sf_env_vars == schema_file.env_vars - - # add a macro and a macro schema file - os.environ["ENV_VAR_SOME_KEY"] = "toodles" - write_file(env_var_macro_sql, project.project_root, "macros", "env_var_macro.sql") - write_file(env_var_macros_yml, project.project_root, "macros", "env_var_macros.yml") - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - expected_env_vars = { - "ENV_VAR_TEST": "second", - "TEST_SCHEMA_VAR": "view", - "ENV_VAR_DATABASE": "dbt", - "ENV_VAR_SEVERITY": "error", - "ENV_VAR_COLOR": "green", - "ENV_VAR_OWNER": "John Doe", - "ENV_VAR_SOME_KEY": "toodles", - } - assert expected_env_vars == manifest.env_vars - macro_id = "macro.test.do_something" - macro = manifest.macros[macro_id] - assert macro.meta == {"some_key": "toodles"} - # change the env var - os.environ["ENV_VAR_SOME_KEY"] = "dumdedum" - results = run_dbt(["--partial-parse", "run"]) - manifest = get_manifest(project.project_root) - macro = manifest.macros[macro_id] - assert macro.meta == {"some_key": "dumdedum"} - - # Add a schema file with a test on model_color and env_var in test enabled config - write_file(env_var_model_test_yml, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - manifest = get_manifest(project.project_root) - model_color = manifest.nodes["model.test.model_color"] - schema_file = manifest.files[model_color.patch_path] - expected_env_vars = { - "models": { - "model_one": ["TEST_SCHEMA_VAR", "ENV_VAR_COLOR"], - "model_color": ["ENV_VAR_ENABLED"], - }, - "exposures": {"proxy_for_dashboard": ["ENV_VAR_OWNER"]}, - } - assert expected_env_vars == schema_file.env_vars - - # Add a metrics file with env_vars - os.environ["ENV_VAR_METRICS"] = "TeStInG" - write_file(people_sql, project.project_root, "models", "people.sql") - write_file( - metricflow_time_spine_sql, project.project_root, "models", "metricflow_time_spine.sql" - ) - write_file( - people_semantic_models_yml, project.project_root, "models", "semantic_models.yml" - ) - write_file(env_var_metrics_yml, project.project_root, "models", "metrics.yml") - results = run_dbt(["run"]) - manifest = get_manifest(project.project_root) - assert "ENV_VAR_METRICS" in manifest.env_vars - assert manifest.env_vars["ENV_VAR_METRICS"] == "TeStInG" - metric_node = manifest.metrics["metric.test.number_of_people"] - assert metric_node.meta == {"my_meta": "TeStInG"} - - # Change metrics env var - os.environ["ENV_VAR_METRICS"] = "Changed!" - results = run_dbt(["run"]) - manifest = get_manifest(project.project_root) - metric_node = manifest.metrics["metric.test.number_of_people"] - assert metric_node.meta == {"my_meta": "Changed!"} - - # delete the env vars to cleanup - del os.environ["ENV_VAR_TEST"] - del os.environ["ENV_VAR_SEVERITY"] - del os.environ["ENV_VAR_DATABASE"] - del os.environ["TEST_SCHEMA_VAR"] - del os.environ["ENV_VAR_COLOR"] - del os.environ["ENV_VAR_SOME_KEY"] - del os.environ["ENV_VAR_OWNER"] - del os.environ["ENV_VAR_METRICS"] - - -class TestProjectEnvVars: - @pytest.fixture(scope="class") - def project_config_update(self): - # Need to set the environment variable here initially because - # the project fixture loads the config. - os.environ["ENV_VAR_NAME"] = "Jane Smith" - return {"models": {"+meta": {"meta_name": "{{ env_var('ENV_VAR_NAME') }}"}}} - - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - def test_project_env_vars(self, project): - # Initial run - results = run_dbt(["run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - state_check = manifest.state_check - model_id = "model.test.model_one" - model = manifest.nodes[model_id] - assert model.config.meta["meta_name"] == "Jane Smith" - env_vars_hash_checksum = state_check.project_env_vars_hash.checksum - - # Change the environment variable - os.environ["ENV_VAR_NAME"] = "Jane Doe" - results = run_dbt(["run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.meta["meta_name"] == "Jane Doe" - assert env_vars_hash_checksum != manifest.state_check.project_env_vars_hash.checksum - - # cleanup - del os.environ["ENV_VAR_NAME"] - - -class TestProfileEnvVars: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - @pytest.fixture(scope="class") - def dbt_profile_target(self): - # Need to set these here because the base integration test class - # calls 'load_config' before the tests are run. - # Note: only the specified profile is rendered, so there's no - # point it setting env_vars in non-used profiles. - os.environ["ENV_VAR_USER"] = "root" - os.environ["ENV_VAR_PASS"] = "password" - return { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": 5432, - "user": "{{ env_var('ENV_VAR_USER') }}", - "pass": "{{ env_var('ENV_VAR_PASS') }}", - "dbname": "dbt", - } - - def test_profile_env_vars(self, project, logs_dir): - # Initial run - os.environ["ENV_VAR_USER"] = "root" - os.environ["ENV_VAR_PASS"] = "password" - - run_dbt(["run"]) - manifest = get_manifest(project.project_root) - env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum - - # Change env_vars, the user doesn't exist, this should fail - os.environ["ENV_VAR_USER"] = "fake_user" - - # N.B. run_dbt_and_capture won't work here because FailedToConnectError ends the test entirely - with pytest.raises(FailedToConnectError): - run_dbt(["run"], expect_pass=False) - - log_output = Path(logs_dir, "dbt.log").read_text() - assert "env vars used in profiles.yml have changed" in log_output - - manifest = get_manifest(project.project_root) - assert env_vars_checksum != manifest.state_check.profile_env_vars_hash.checksum - - -class TestProfileSecretEnvVars: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - } - - @property - def dbt_profile_target(self): - # Need to set these here because the base integration test class - # calls 'load_config' before the tests are run. - # Note: only the specified profile is rendered, so there's no - # point in setting env_vars in non-used profiles. - - # user is secret and password is not. postgres on macos doesn't care if the password - # changes so we have to change the user. related: https://github.com/dbt-labs/dbt-core/pull/4250 - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" - os.environ["ENV_VAR_PASS"] = "password" - return { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": 5432, - "user": "{{ env_var('DBT_ENV_SECRET_USER') }}", - "pass": "{{ env_var('ENV_VAR_PASS') }}", - "dbname": "dbt", - } - - def test_profile_secret_env_vars(self, project): - # Initial run - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" - os.environ["ENV_VAR_PASS"] = "password" - - results = run_dbt(["run"]) - manifest = get_manifest(project.project_root) - env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum - - # Change a secret var, it shouldn't register because we shouldn't save secrets. - os.environ[SECRET_ENV_PREFIX + "USER"] = "fake_user" - # we just want to see if the manifest has included - # the secret in the hash of environment variables. - (results, log_output) = run_dbt_and_capture(["run"], expect_pass=True) - # I020 is the event code for "env vars used in profiles.yml have changed" - assert not ("I020" in log_output) - manifest = get_manifest(project.project_root) - assert env_vars_checksum == manifest.state_check.profile_env_vars_hash.checksum diff --git a/tests/functional/partial_parsing/test_versioned_models.py b/tests/functional/partial_parsing/test_versioned_models.py deleted file mode 100644 index d725c6718..000000000 --- a/tests/functional/partial_parsing/test_versioned_models.py +++ /dev/null @@ -1,128 +0,0 @@ -import pathlib - -from dbt.exceptions import DuplicateVersionedUnversionedError -from dbt.tests.util import ( - get_manifest, - read_file, - rm_file, - run_dbt, - write_file, -) -import pytest - - -model_one_sql = """ -select 1 as fun -""" - -model_one_downstream_sql = """ -select fun from {{ ref('model_one') }} -""" - -models_versions_schema_yml = """ - -models: - - name: model_one - description: "The first model" - versions: - - v: 1 - - v: 2 -""" - -models_versions_defined_in_schema_yml = """ -models: - - name: model_one - description: "The first model" - versions: - - v: 1 - - v: 2 - defined_in: model_one_different -""" - -models_versions_updated_schema_yml = """ -models: - - name: model_one - latest_version: 1 - description: "The first model" - versions: - - v: 1 - - v: 2 - defined_in: model_one_different -""" - -model_two_sql = """ -select 1 as notfun -""" - - -class TestVersionedModels: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one_v1.sql": model_one_sql, - "model_one.sql": model_one_sql, - "model_one_downstream.sql": model_one_downstream_sql, - "schema.yml": models_versions_schema_yml, - } - - def test_pp_versioned_models(self, project): - results = run_dbt(["run"]) - assert len(results) == 3 - - manifest = get_manifest(project.project_root) - model_one_node = manifest.nodes["model.test.model_one.v1"] - assert not model_one_node.is_latest_version - model_two_node = manifest.nodes["model.test.model_one.v2"] - assert model_two_node.is_latest_version - # assert unpinned ref points to latest version - model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"] - assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v2"] - - # update schema.yml block - model_one is now 'defined_in: model_one_different' - rm_file(project.project_root, "models", "model_one.sql") - write_file(model_one_sql, project.project_root, "models", "model_one_different.sql") - write_file( - models_versions_defined_in_schema_yml, project.project_root, "models", "schema.yml" - ) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - - # update versions schema.yml block - latest_version from 2 to 1 - write_file( - models_versions_updated_schema_yml, project.project_root, "models", "schema.yml" - ) - # This is where the test was failings in a CI run with: - # relation \"test..._test_partial_parsing.model_one_downstream\" does not exist - # because in core/dbt/include/global_project/macros/materializations/models/view/view.sql - # "existing_relation" didn't actually exist by the time it gets to the rename of the - # existing relation. - (pathlib.Path(project.project_root) / "log_output").mkdir(parents=True, exist_ok=True) - results = run_dbt( - ["--partial-parse", "--log-format-file", "json", "--log-path", "log_output", "run"] - ) - assert len(results) == 3 - - manifest = get_manifest(project.project_root) - model_one_node = manifest.nodes["model.test.model_one.v1"] - assert model_one_node.is_latest_version - model_two_node = manifest.nodes["model.test.model_one.v2"] - assert not model_two_node.is_latest_version - # assert unpinned ref points to latest version - model_one_downstream_node = manifest.nodes["model.test.model_one_downstream"] - assert model_one_downstream_node.depends_on.nodes == ["model.test.model_one.v1"] - - # assert unpinned ref to latest-not-max version yields an "FYI" info-level log - log_output = read_file("log_output", "dbt.log").replace("\n", " ").replace("\\n", " ") - assert "UnpinnedRefNewVersionAvailable" in log_output - - # update versioned model - write_file(model_two_sql, project.project_root, "models", "model_one_different.sql") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 3 - manifest = get_manifest(project.project_root) - assert len(manifest.nodes) == 3 - - # create a new model_one in model_one.sql and re-parse - write_file(model_one_sql, project.project_root, "models", "model_one.sql") - with pytest.raises(DuplicateVersionedUnversionedError): - run_dbt(["parse"]) From f67e02f80d9c8d76fac262fea1b41cf77342cb75 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 16 Apr 2024 15:05:38 -0400 Subject: [PATCH 054/114] Fix `psycopg2` changelog entry (#61) --- .changes/unreleased/Fixes-20240412-153154.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.changes/unreleased/Fixes-20240412-153154.yaml b/.changes/unreleased/Fixes-20240412-153154.yaml index 10bac271f..c64659aff 100644 --- a/.changes/unreleased/Fixes-20240412-153154.yaml +++ b/.changes/unreleased/Fixes-20240412-153154.yaml @@ -1,5 +1,5 @@ kind: Fixes -body: Determine `psycopg2` based on `platform_system` (Linux or other) +body: Determine `psycopg2` based on `platform_system` (Linux or other), remove usage of `DBT_PSYCOPG2_NAME` environment variable time: 2024-04-12T15:31:54.861201-04:00 custom: Author: mikealfare From 3e7940a1160e1af0fd1c2dcba6c8723cf22055eb Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Tue, 16 Apr 2024 17:11:59 -0500 Subject: [PATCH 055/114] manually update version, do changie batch and merge (#63) --- .changes/1.0.8-b3.md | 9 +++++++++ .changes/unreleased/Fixes-20240412-153154.yaml | 6 ------ .../unreleased/Under the Hood-20240410-180644.yaml | 6 ------ CHANGELOG.md | 11 +++++++++++ dbt/adapters/postgres/__version__.py | 2 +- 5 files changed, 21 insertions(+), 13 deletions(-) create mode 100644 .changes/1.0.8-b3.md delete mode 100644 .changes/unreleased/Fixes-20240412-153154.yaml delete mode 100644 .changes/unreleased/Under the Hood-20240410-180644.yaml diff --git a/.changes/1.0.8-b3.md b/.changes/1.0.8-b3.md new file mode 100644 index 000000000..d73520b5e --- /dev/null +++ b/.changes/1.0.8-b3.md @@ -0,0 +1,9 @@ +## dbt-postgres 1.0.8-b3 - April 16, 2024 + +### Fixes + +* Determine `psycopg2` based on `platform_system` (Linux or other), remove usage of `DBT_PSYCOPG2_NAME` environment variable + +### Under the Hood + +* Update dependabot configuration to cover GHA diff --git a/.changes/unreleased/Fixes-20240412-153154.yaml b/.changes/unreleased/Fixes-20240412-153154.yaml deleted file mode 100644 index c64659aff..000000000 --- a/.changes/unreleased/Fixes-20240412-153154.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Determine `psycopg2` based on `platform_system` (Linux or other), remove usage of `DBT_PSYCOPG2_NAME` environment variable -time: 2024-04-12T15:31:54.861201-04:00 -custom: - Author: mikealfare - Issue: "60" diff --git a/.changes/unreleased/Under the Hood-20240410-180644.yaml b/.changes/unreleased/Under the Hood-20240410-180644.yaml deleted file mode 100644 index 2f7eeda61..000000000 --- a/.changes/unreleased/Under the Hood-20240410-180644.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Update dependabot configuration to cover GHA -time: 2024-04-10T18:06:44.884603-04:00 -custom: - Author: mikealfare - Issue: "56" diff --git a/CHANGELOG.md b/CHANGELOG.md index 29a312343..6c13880e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,3 +19,14 @@ and is generated by [Changie](https://github.com/miniscruff/changie). ### Security * Pin `black>=24.3` in `pyproject.toml` + +## dbt-postgres 1.0.8-b3 - April 16, 2024 + +### Fixes + +* Determine `psycopg2` based on `platform_system` (Linux or other), remove usage of `DBT_PSYCOPG2_NAME` environment variable + +### Under the Hood + +* Update dependabot configuration to cover GHA + diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index 7d16c28f0..b0f82cbca 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.8.0b2" +version = "1.8.0b3" From f40554d7fa2132ee731ee8df7e0c485e76027d3c Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Tue, 16 Apr 2024 18:42:41 -0500 Subject: [PATCH 056/114] bumping to beta4 due to issue during release (#65) --- dbt/adapters/postgres/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index b0f82cbca..6b76061fd 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.8.0b3" +version = "1.8.0b4" From 333a1e59bed45dc2c42feab409154791812b1c99 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 19 Apr 2024 13:19:08 -0400 Subject: [PATCH 057/114] Add and configure `pre-commit` (#62) --- .github/workflows/code-quality.yml | 28 +- .github/workflows/integration-tests.yml | 9 - .github/workflows/unit-tests.yml | 8 - .pre-commit-config.yaml | 55 +++ CHANGELOG.md | 1 - CONTRIBUTING.md | 2 +- dbt/adapters/postgres/relation.py | 30 +- .../postgres/relation_configs/index.py | 2 +- pyproject.toml | 72 +--- tests/functional/artifacts/test_artifacts.py | 11 - .../artifacts/test_previous_version_state.py | 2 +- .../context_methods/test_builtin_functions.py | 1 - tests/functional/dbt_debug/test_dbt_debug.py | 5 +- tests/functional/dbt_runner.py | 4 +- tests/functional/profiles/test_profile_dir.py | 168 -------- .../functional/profiles/test_profiles_yml.py | 65 --- .../schema_tests/test_schema_v2_tests.py | 4 +- .../shared_tests/test_hooks/test_hooks.py | 1 + .../test_simple_seed/test_simple_seed.py | 1 + tests/functional/source_overrides/fixtures.py | 387 ------------------ .../test_simple_source_override.py | 146 ------- .../test_source_overrides_duplicate_model.py | 68 --- tests/functional/test_connection_manager.py | 7 +- tests/functional/test_dbt_runner.py | 14 +- tests/functional/test_init.py | 1 - tests/unit/test_connection.py | 6 +- tests/unit/test_renamed_relations.py | 12 +- 27 files changed, 121 insertions(+), 989 deletions(-) create mode 100644 .pre-commit-config.yaml delete mode 100644 tests/functional/profiles/test_profile_dir.py delete mode 100644 tests/functional/profiles/test_profiles_yml.py delete mode 100644 tests/functional/source_overrides/fixtures.py delete mode 100644 tests/functional/source_overrides/test_simple_source_override.py delete mode 100644 tests/functional/source_overrides/test_source_overrides_duplicate_model.py diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 081079fc3..1ca2ded01 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -9,16 +9,14 @@ on: workflow_dispatch: inputs: dbt_adapters_branch: - description: "The branch of dbt-adapters to evaluate" - type: string - required: true - default: "main" + description: "The branch of dbt-adapters to evaluate" + type: string + default: "main" workflow_call: inputs: dbt_adapters_branch: description: "The branch of dbt-adapters to evaluate" type: string - required: true default: "main" permissions: read-all @@ -27,16 +25,12 @@ permissions: read-all concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }} cancel-in-progress: true -env: - # set DBT_ADAPTERS_BRANCH to the input value if the event is a workflow_dispatch (workflow_call uses the same event_name), - # otherwise use 'main' - DBT_ADAPTERS_BRANCH: ${{ github.event_name == 'workflow_dispatch' && inputs.dbt_adapters_branch || 'main' }} + jobs: code-quality: name: Code Quality runs-on: ubuntu-latest - steps: - name: Check out repository uses: actions/checkout@v4 @@ -44,17 +38,13 @@ jobs: persist-credentials: false - name: Update Adapters and Core branches + if: ${{ contains(github.event_name, 'workflow_') }} shell: bash - run: | - ./.github/scripts/update_dev_packages.sh \ - $DBT_ADAPTERS_BRANCH \ - "main" + run: ./.github/scripts/update_dev_packages.sh ${{ inputs.dbt_adapters_branch }} "main" - name: Setup `hatch` uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main - - name: Run linters - run: hatch run lint:all - - - name: Run typechecks - run: hatch run typecheck:all + - name: Run code quality + shell: bash + run: hatch run code-quality diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 6bb37ec51..4bb423fae 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -31,7 +31,6 @@ on: required: false default: "main" - permissions: read-all # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise @@ -101,14 +100,6 @@ jobs: POSTGRES_TEST_DATABASE: dbt POSTGRES_TEST_THREADS: 4 - - name: Publish results - uses: dbt-labs/dbt-adapters/.github/actions/publish-results@main - if: always() - with: - source-file: "results.csv" - file-name: "integration_results" - python-version: ${{ matrix.python-version }} - psycopg2-check: name: "Test psycopg2 build version" runs-on: ${{ matrix.scenario.platform }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 69681c0fa..91d8c5783 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -39,11 +39,3 @@ jobs: - name: Run unit tests run: hatch run unit-tests:all shell: bash - - - name: Publish results - uses: dbt-labs/dbt-adapters/.github/actions/publish-results@main - if: always() - with: - source-file: "results.csv" - file-name: "unit_results" - python-version: ${{ matrix.python-version }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..c869fe86e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,55 @@ +default_language_version: + python: python3 + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-yaml + args: [--unsafe] + - id: check-json + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + +- repo: https://github.com/dbt-labs/pre-commit-hooks + rev: v0.1.0a1 + hooks: + - id: dbt-core-in-adapters-check + +- repo: https://github.com/psf/black + rev: 24.4.0 + hooks: + - id: black + args: + - --line-length=99 + - --target-version=py38 + - --target-version=py39 + - --target-version=py310 + - --target-version=py311 + +- repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + exclude: tests/ + args: + - --max-line-length=99 + - --select=E,F,W + - --ignore=E203,E501,E741,W503,W504 + - --per-file-ignores=*/__init__.py:F401 + +- repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.9.0 + hooks: + - id: mypy + args: + - --explicit-package-bases + - --ignore-missing-imports + - --pretty + - --show-error-codes + files: ^dbt/adapters/postgres + additional_dependencies: + - types-PyYAML + - types-protobuf + - types-pytz diff --git a/CHANGELOG.md b/CHANGELOG.md index 6c13880e6..f9ef4153a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,4 +29,3 @@ and is generated by [Changie](https://github.com/miniscruff/changie). ### Under the Hood * Update dependabot configuration to cover GHA - diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f39e4cb5f..427d2de52 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -164,7 +164,7 @@ Remember to commit and push the file that's created. ### Signing the CLA -> **_NOTE:_** All contributors to `dbt-postgres` must sign the +> **_NOTE:_** All contributors to `dbt-postgres` must sign the > [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements)(CLA). Maintainers will be unable to merge contributions until the contributor signs the CLA. diff --git a/dbt/adapters/postgres/relation.py b/dbt/adapters/postgres/relation.py index 677b12ac6..05d552376 100644 --- a/dbt/adapters/postgres/relation.py +++ b/dbt/adapters/postgres/relation.py @@ -20,19 +20,23 @@ @dataclass(frozen=True, eq=False, repr=False) class PostgresRelation(BaseRelation): - renameable_relations: FrozenSet[RelationType] = field(default_factory=lambda: frozenset( - { - RelationType.View, - RelationType.Table, - RelationType.MaterializedView, - } - )) - replaceable_relations: FrozenSet[RelationType] = field(default_factory=lambda: frozenset( - { - RelationType.View, - RelationType.Table, - } - )) + renameable_relations: FrozenSet[RelationType] = field( + default_factory=lambda: frozenset( + { + RelationType.View, + RelationType.Table, + RelationType.MaterializedView, + } + ) + ) + replaceable_relations: FrozenSet[RelationType] = field( + default_factory=lambda: frozenset( + { + RelationType.View, + RelationType.Table, + } + ) + ) def __post_init__(self): # Check for length of Postgres table/view names. diff --git a/dbt/adapters/postgres/relation_configs/index.py b/dbt/adapters/postgres/relation_configs/index.py index c4863073d..0ed3e8aaf 100644 --- a/dbt/adapters/postgres/relation_configs/index.py +++ b/dbt/adapters/postgres/relation_configs/index.py @@ -23,7 +23,7 @@ class PostgresIndexMethod(StrEnum): @classmethod def default(cls) -> "PostgresIndexMethod": - return cls.btree + return cls("btree") @dataclass(frozen=True, eq=True, unsafe_hash=True) diff --git a/pyproject.toml b/pyproject.toml index 996ea46ab..0e93423cb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,56 +59,30 @@ path = "dbt/adapters/postgres/__version__.py" dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + 'pre-commit==3.7.0;python_version>="3.9"', + 'pre-commit==3.5.0;python_version=="3.8"', ] - -[tool.hatch.envs.lint] -detached = true -dependencies = [ - "black>=24.3", - "flake8", - "Flake8-pyproject", -] -[tool.hatch.envs.lint.scripts] -all = [ - "black", - "flake8", -] -black = "python -m black ." -flake8 = "python -m flake8 ." - -[tool.hatch.envs.typecheck] -dependencies = [ - "mypy", - "types-protobuf", - "types-pytz", -] -[tool.hatch.envs.typecheck.scripts] -all = "python -m mypy ." +[tool.hatch.envs.default.scripts] +dev = "pre-commit install" +code-quality = "pre-commit run --all-files" [tool.hatch.envs.unit-tests] dependencies = [ + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", "freezegun", "pytest", "pytest-dotenv", "pytest-mock", "pytest-xdist", ] -extra-dependencies = [ - # TODO: remove `dbt-core` dependencies from unit tests - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", - "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", -] [tool.hatch.envs.unit-tests.scripts] all = "python -m pytest {args:tests/unit}" [tool.hatch.envs.integration-tests] template = "unit-tests" extra-dependencies = [ - # TODO: remove `dbt-core` dependencies from integration tests - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", - "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", ] [tool.hatch.envs.integration-tests.env-vars] @@ -125,8 +99,6 @@ dependencies = [ "twine", "check-wheel-contents", ] - - [tool.hatch.envs.build.scripts] check-all = [ "- check-wheel", @@ -143,35 +115,9 @@ check-sdist = [ "pip freeze | grep dbt-postgres", ] -[tool.black] -line-length = 99 -target-version = ['py38'] - -[tool.flake8] -select = ["E", "W", "F"] -ignore = ["E203", "E501", "E741", "W503", "W504"] -exclude = ["tests", "venv", ".hatch_venvs"] -per-file-ignores = ["*/__init__.py: F401"] - -[tool.mypy] -namespace_packages = true -show_error_codes = true -explicit_package_bases = true -ignore_missing_imports = true -pretty = true -files = [ - "dbt/adapters/postgres", - "tests/unit", -] -exclude = [ - "tests/functional", - "venv", - ".hatch_venvs", -] - [tool.pytest] env_files = ["test.env"] testpaths = [ "tests/functional", "tests/unit", -] \ No newline at end of file +] diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py index 756ee73f1..ca2c03f04 100644 --- a/tests/functional/artifacts/test_artifacts.py +++ b/tests/functional/artifacts/test_artifacts.py @@ -1,4 +1,3 @@ -from datetime import datetime import jsonschema import os @@ -12,16 +11,6 @@ ) import pytest -from tests.functional.artifacts.expected_manifest import ( - expected_references_manifest, - expected_seeded_manifest, - expected_versions_manifest, -) -from tests.functional.artifacts.expected_run_results import ( - expected_references_run_results, - expected_run_results, - expected_versions_run_results, -) from tests.functional.utils import run_dbt, run_dbt_and_capture diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py index a19a2486e..5ed37b9a2 100644 --- a/tests/functional/artifacts/test_previous_version_state.py +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -5,7 +5,7 @@ from dbt.artifacts.schemas.base import get_artifact_schema_version from dbt.artifacts.schemas.run import RunResultsArtifact from dbt.contracts.graph.manifest import WritableManifest -from dbt.exceptions import IncompatibleSchemaError +from dbt.artifacts.exceptions import IncompatibleSchemaError from dbt.tests.util import get_manifest import pytest diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py index f5d7b92f0..b8a47b343 100644 --- a/tests/functional/context_methods/test_builtin_functions.py +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -1,5 +1,4 @@ import json -import os from dbt.tests.util import write_file from dbt_common.exceptions import CompilationError diff --git a/tests/functional/dbt_debug/test_dbt_debug.py b/tests/functional/dbt_debug/test_dbt_debug.py index 3e2a182ee..e318322e2 100644 --- a/tests/functional/dbt_debug/test_dbt_debug.py +++ b/tests/functional/dbt_debug/test_dbt_debug.py @@ -1,9 +1,6 @@ import pytest -import os -import re -import yaml -from tests.functional.utils import run_dbt, run_dbt_and_capture +from tests.functional.utils import run_dbt_and_capture MODELS__MODEL_SQL = """ seled 1 as id diff --git a/tests/functional/dbt_runner.py b/tests/functional/dbt_runner.py index 01ebc8733..ba7aa2baf 100644 --- a/tests/functional/dbt_runner.py +++ b/tests/functional/dbt_runner.py @@ -25,13 +25,15 @@ def assert_run_results_have_compiled_node_attributes( class dbtTestRunner(dbtRunner): + exit_assertions: List[Callable[[List[str], dbtRunnerResult], None]] + def __init__( self, manifest: Optional[Manifest] = None, callbacks: Optional[List[Callable[[EventMsg], None]]] = None, exit_assertions: Optional[List[Callable[[List[str], dbtRunnerResult], None]]] = None, ): - self.exit_assertions = exit_assertions if exit_assertions else _STANDARD_ASSERTIONS + self.exit_assertions = exit_assertions if exit_assertions else _STANDARD_ASSERTIONS # type: ignore super().__init__(manifest, callbacks) def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult: diff --git a/tests/functional/profiles/test_profile_dir.py b/tests/functional/profiles/test_profile_dir.py deleted file mode 100644 index 282c978c9..000000000 --- a/tests/functional/profiles/test_profile_dir.py +++ /dev/null @@ -1,168 +0,0 @@ -from contextlib import contextmanager -import os -from pathlib import Path - -from dbt.tests.util import rm_file, write_file -import pytest -import yaml - -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -@pytest.fixture(scope="class") -def profiles_yml(profiles_root, dbt_profile_data): - write_file(yaml.safe_dump(dbt_profile_data), profiles_root, "profiles.yml") - return dbt_profile_data - - -@pytest.fixture(scope="class") -def profiles_home_root(): - return os.path.join(os.path.expanduser("~"), ".dbt") - - -@pytest.fixture(scope="class") -def profiles_env_root(tmpdir_factory): - path = tmpdir_factory.mktemp("profile_env") - # environment variables are lowercased for some reason in _get_flag_value_from_env within dbt.flags - return str(path).lower() - - -@pytest.fixture(scope="class") -def profiles_flag_root(tmpdir_factory): - return tmpdir_factory.mktemp("profile_flag") - - -@pytest.fixture(scope="class") -def profiles_project_root(project): - return project.project_root - - -@pytest.fixture(scope="class") -def cwd(): - return os.getcwd() - - -@pytest.fixture(scope="class") -def cwd_parent(cwd): - return os.path.dirname(cwd) - - -@pytest.fixture(scope="class") -def cwd_child(): - # pick any child directory of the dbt project - return Path(os.getcwd()) / "macros" - - -@pytest.fixture -def write_profiles_yml(request): - def _write_profiles_yml(profiles_dir, dbt_profile_contents): - def cleanup(): - rm_file(Path(profiles_dir) / "profiles.yml") - - request.addfinalizer(cleanup) - write_file(yaml.safe_dump(dbt_profile_contents), profiles_dir, "profiles.yml") - - return _write_profiles_yml - - -# https://gist.github.com/igniteflow/7267431?permalink_comment_id=2551951#gistcomment-2551951 -@contextmanager -def environ(env): - """Temporarily set environment variables inside the context manager and - fully restore previous environment afterwards - """ - original_env = {key: os.getenv(key) for key in env} - os.environ.update(env) - try: - yield - finally: - for key, value in original_env.items(): - if value is None: - del os.environ[key] - else: - os.environ[key] = value - - -class TestProfilesMayNotExist: - def test_debug(self, project): - # The database will not be able to connect; expect neither a pass or a failure (but not an exception) - run_dbt(["debug", "--profiles-dir", "does_not_exist"], expect_pass=None) - - def test_deps(self, project): - run_dbt(["deps", "--profiles-dir", "does_not_exist"]) - - -class TestProfiles: - def dbt_debug(self, project_dir_cli_arg=None, profiles_dir_cli_arg=None): - # begin with no command-line args or user config (from profiles.yml) - # flags.set_from_args(Namespace(), {}) - command = ["debug"] - - if project_dir_cli_arg: - command.extend(["--project-dir", str(project_dir_cli_arg)]) - - if profiles_dir_cli_arg: - command.extend(["--profiles-dir", str(profiles_dir_cli_arg)]) - - # get the output of `dbt debug` regardless of the exit code - return run_dbt_and_capture(command, expect_pass=None) - - @pytest.mark.parametrize( - "project_dir_cli_arg, working_directory", - [ - # 3 different scenarios for `--project-dir` flag and current working directory - (None, "cwd"), # no --project-dir flag and cwd is project directory - (None, "cwd_child"), # no --project-dir flag and cwd is a project subdirectory - ("cwd", "cwd_parent"), # use --project-dir flag and cwd is outside of it - ], - ) - def test_profiles( - self, - project_dir_cli_arg, - working_directory, - write_profiles_yml, - dbt_profile_data, - profiles_home_root, - profiles_project_root, - profiles_flag_root, - profiles_env_root, - request, - ): - """Verify priority order to search for profiles.yml configuration. - - Reverse priority order: - 1. HOME directory - 2. DBT_PROFILES_DIR environment variable - 3. --profiles-dir command-line argument - - Specification later in this list will take priority over earlier ones, even when both are provided. - """ - - # https://pypi.org/project/pytest-lazy-fixture/ is an alternative to using request.getfixturevalue - if project_dir_cli_arg is not None: - project_dir_cli_arg = request.getfixturevalue(project_dir_cli_arg) - - if working_directory is not None: - working_directory = request.getfixturevalue(working_directory) - - # start in the specified directory - if working_directory is not None: - os.chdir(working_directory) - # default case with profiles.yml in the HOME directory - _, stdout = self.dbt_debug(project_dir_cli_arg) - assert f"Using profiles.yml file at {profiles_home_root}" in stdout - - # set DBT_PROFILES_DIR environment variable for the remainder of the cases - env_vars = {"DBT_PROFILES_DIR": profiles_env_root} - with environ(env_vars): - _, stdout = self.dbt_debug(project_dir_cli_arg) - assert f"Using profiles.yml file at {profiles_env_root}" in stdout - - # This additional case is also within the context manager because we want to verify - # that it takes priority even when the relevant environment variable is also set - - # set --profiles-dir on the command-line - _, stdout = self.dbt_debug( - project_dir_cli_arg, profiles_dir_cli_arg=profiles_flag_root - ) - assert f"Using profiles.yml file at {profiles_flag_root}" in stdout diff --git a/tests/functional/profiles/test_profiles_yml.py b/tests/functional/profiles/test_profiles_yml.py deleted file mode 100644 index c4eeabbd8..000000000 --- a/tests/functional/profiles/test_profiles_yml.py +++ /dev/null @@ -1,65 +0,0 @@ -from pathlib import Path - -from dbt.cli.main import dbtRunner -from test_profile_dir import environ - - -jinjaesque_password = "no{{jinja{%re{#ndering" - -profile_with_jinjaesque_password = f"""test: - outputs: - default: - dbname: my_db - host: localhost - password: {jinjaesque_password} - port: 12345 - schema: dummy - threads: 4 - type: postgres - user: peter.webb - target: default -""" - -profile_with_env_password = """test: - outputs: - default: - dbname: my_db - host: localhost - password: "{{ env_var('DBT_PASSWORD') }}" - port: 12345 - schema: dummy - threads: 4 - type: postgres - user: peter.webb - target: default -""" - - -class TestProfileParsing: - def write_profiles_yml(self, profiles_root, content) -> None: - with open(Path(profiles_root, "profiles.yml"), "w") as profiles_yml: - profiles_yml.write(content) - - def test_password_not_jinja_rendered_when_invalid(self, project, profiles_root) -> None: - """Verifies that passwords that contain Jinja control characters, but which are - not valid Jinja, do not cause errors.""" - self.write_profiles_yml(profiles_root, profile_with_jinjaesque_password) - - events = [] - result = dbtRunner(callbacks=[events.append]).invoke(["parse"]) - assert result.success - - for e in events: - assert "no{{jinja{%re{#ndering" not in e.info.msg - - def test_password_jinja_rendered_when_valid(self, project, profiles_root) -> None: - """Verifies that a password value that is valid Jinja is rendered as such, - and that it doesn't cause problems if the resulting value looks like Jinja""" - self.write_profiles_yml(profiles_root, profile_with_env_password) - - events = [] - with environ({"DBT_PASSWORD": jinjaesque_password}): - result = dbtRunner(callbacks=[events.append]).invoke(["parse"]) - - assert result.success - assert project.adapter.config.credentials.password == jinjaesque_password diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py index a268f0960..aae164a1d 100644 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -2,7 +2,7 @@ import re from dbt.contracts.results import TestStatus -from dbt.exceptions import ParsingError, DuplicateResourceNameError +from dbt.exceptions import ParsingError from dbt.tests.fixtures.project import write_project_files from dbt.tests.util import run_dbt, write_file from dbt_common.exceptions import CompilationError @@ -20,8 +20,6 @@ custom_generic_test_names__schema_yml, custom_generic_test_names_alt_format__model_a, custom_generic_test_names_alt_format__schema_yml, - dupe_generic_tests_collide__model_a, - dupe_generic_tests_collide__schema_yml, ephemeral__ephemeral_sql, ephemeral__schema_yml, invalid_schema_models__model_sql, diff --git a/tests/functional/shared_tests/test_hooks/test_hooks.py b/tests/functional/shared_tests/test_hooks/test_hooks.py index 843811307..7e832038c 100644 --- a/tests/functional/shared_tests/test_hooks/test_hooks.py +++ b/tests/functional/shared_tests/test_hooks/test_hooks.py @@ -2,6 +2,7 @@ This file needs to be in its own directory because it uses a `data` directory. Placing this file in its own directory avoids collisions. """ + from dbt.tests.adapter.hooks.test_model_hooks import ( BasePrePostModelHooks, BaseHookRefs, diff --git a/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py b/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py index cd8497883..61664ca9a 100644 --- a/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py +++ b/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py @@ -2,6 +2,7 @@ This file needs to be in its own directory because it creates a `data` directory at run time. Placing this file in its own directory avoids collisions. """ + from dbt.tests.adapter.simple_seed.test_seed import ( BaseBasicSeedTests, BaseSeedConfigFullRefreshOn, diff --git a/tests/functional/source_overrides/fixtures.py b/tests/functional/source_overrides/fixtures.py deleted file mode 100644 index f7f49235d..000000000 --- a/tests/functional/source_overrides/fixtures.py +++ /dev/null @@ -1,387 +0,0 @@ -import pytest - - -dupe_models__schema2_yml = """ -version: 2 -sources: - - name: my_source - overrides: localdep - schema: "{{ target.schema }}" - database: "{{ target.database }}" - freshness: - error_after: {count: 3, period: day} - tables: - - name: my_table - freshness: null - identifier: my_real_seed - # on the override, the "color" column is only unique, it can be null! - columns: - - name: id - data_tests: - - not_null - - unique - - name: color - data_tests: - - unique - - name: my_other_table - freshness: null - identifier: my_real_other_seed - - name: snapshot_freshness - identifier: snapshot_freshness_base - - freshness: - error_after: {count: 1, period: day} - -""" - -dupe_models__schema1_yml = """ -version: 2 -sources: - - name: my_source - overrides: localdep - schema: "{{ target.schema }}" - database: "{{ target.database }}" - freshness: - error_after: {count: 3, period: day} - tables: - - name: my_table - freshness: null - identifier: my_real_seed - # on the override, the "color" column is only unique, it can be null! - columns: - - name: id - data_tests: - - not_null - - unique - - name: color - data_tests: - - unique - - name: my_other_table - freshness: null - identifier: my_real_other_seed - - name: snapshot_freshness - identifier: snapshot_freshness_base - loaded_at_field: updated_at - freshness: - error_after: {count: 1, period: day} - -""" - -local_dependency__dbt_project_yml = """ -config-version: 2 -name: localdep - -version: '1.0' - -profile: 'default' - -seeds: - quote_columns: False - -seed-paths: ['seeds'] - -""" - -local_dependency__models__schema_yml = """ -version: 2 -sources: - - name: my_source - schema: invalid_schema - database: invalid_database - freshness: - error_after: {count: 3, period: hour} - tables: - - name: my_table - freshness: null - identifier: my_seed - columns: - - name: id - data_tests: - - unique - - not_null - - name: color - data_tests: - - unique - - not_null - - name: my_other_table - identifier: my_other_seed - columns: - - name: id - data_tests: - - unique - - not_null - - name: letter - data_tests: - - unique - - not_null - - name: snapshot_freshness - identifier: snapshot_freshness_base - loaded_at_field: updated_at - freshness: - error_after: {count: 1, period: hour} - - name: my_other_source - schema: "{{ target.schema }}" - database: "{{ target.database }}" - freshness: - error_after: {count: 1, period: day} - tables: - - name: never_fresh - loaded_at_field: updated_at - -""" - -local_dependency__models__my_model_sql = """ - -{{ config(materialized="table") }} - -with colors as ( - select id, color from {{ source('my_source', 'my_table') }} -), -letters as ( - select id, letter from {{ source('my_source', 'my_other_table') }} -) -select letter, color from colors join letters using (id) - -""" - -local_dependency__seeds__my_other_seed_csv = """id,letter -1,r -2,g -3,b -""" - -local_dependency__seeds__my_seed_csv = """id,color -1,red -2,green -3,blue -""" - -local_dependency__seeds__keep__never_fresh_csv = """favorite_color,id,first_name,email,ip_address,updated_at -blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 -blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 -blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 -blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 -blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 -blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 -blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 -blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 -blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 -blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 -blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 -blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 -blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 -blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 -blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 -blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 -blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 -blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 -blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 -blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 -blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 -blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 -blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 -blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 -blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 -blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 -blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 -blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 -blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 -blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 -blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 -blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 -blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 -blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 -blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 -blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 -blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 -blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 -blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 -blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 -blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 -blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 -blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 -blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 -blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 -blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 -blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 -blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 -blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 -blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 -""" - -local_dependency__seeds__keep__snapshot_freshness_base_csv = """favorite_color,id,first_name,email,ip_address,updated_at -blue,1,Larry,lking0@miitbeian.gov.cn,'69.135.206.194',2008-09-12 19:08:31 -blue,2,Larry,lperkins1@toplist.cz,'64.210.133.162',1978-05-09 04:15:14 -blue,3,Anna,amontgomery2@miitbeian.gov.cn,'168.104.64.114',2011-10-16 04:07:57 -blue,4,Sandra,sgeorge3@livejournal.com,'229.235.252.98',1973-07-19 10:52:43 -blue,5,Fred,fwoods4@google.cn,'78.229.170.124',2012-09-30 16:38:29 -blue,6,Stephen,shanson5@livejournal.com,'182.227.157.105',1995-11-07 21:40:50 -blue,7,William,wmartinez6@upenn.edu,'135.139.249.50',1982-09-05 03:11:59 -blue,8,Jessica,jlong7@hao123.com,'203.62.178.210',1991-10-16 11:03:15 -blue,9,Douglas,dwhite8@tamu.edu,'178.187.247.1',1979-10-01 09:49:48 -blue,10,Lisa,lcoleman9@nydailynews.com,'168.234.128.249',2011-05-26 07:45:49 -blue,11,Ralph,rfieldsa@home.pl,'55.152.163.149',1972-11-18 19:06:11 -blue,12,Louise,lnicholsb@samsung.com,'141.116.153.154',2014-11-25 20:56:14 -blue,13,Clarence,cduncanc@sfgate.com,'81.171.31.133',2011-11-17 07:02:36 -blue,14,Daniel,dfranklind@omniture.com,'8.204.211.37',1980-09-13 00:09:04 -blue,15,Katherine,klanee@auda.org.au,'176.96.134.59',1997-08-22 19:36:56 -blue,16,Billy,bwardf@wikia.com,'214.108.78.85',2003-10-19 02:14:47 -blue,17,Annie,agarzag@ocn.ne.jp,'190.108.42.70',1988-10-28 15:12:35 -blue,18,Shirley,scolemanh@fastcompany.com,'109.251.164.84',1988-08-24 10:50:57 -blue,19,Roger,rfrazieri@scribd.com,'38.145.218.108',1985-12-31 15:17:15 -blue,20,Lillian,lstanleyj@goodreads.com,'47.57.236.17',1970-06-08 02:09:05 -blue,21,Aaron,arodriguezk@nps.gov,'205.245.118.221',1985-10-11 23:07:49 -blue,22,Patrick,pparkerl@techcrunch.com,'19.8.100.182',2006-03-29 12:53:56 -blue,23,Phillip,pmorenom@intel.com,'41.38.254.103',2011-11-07 15:35:43 -blue,24,Henry,hgarcian@newsvine.com,'1.191.216.252',2008-08-28 08:30:44 -blue,25,Irene,iturnero@opera.com,'50.17.60.190',1994-04-01 07:15:02 -blue,26,Andrew,adunnp@pen.io,'123.52.253.176',2000-11-01 06:03:25 -blue,27,David,dgutierrezq@wp.com,'238.23.203.42',1988-01-25 07:29:18 -blue,28,Henry,hsanchezr@cyberchimps.com,'248.102.2.185',1983-01-01 13:36:37 -blue,29,Evelyn,epetersons@gizmodo.com,'32.80.46.119',1979-07-16 17:24:12 -blue,30,Tammy,tmitchellt@purevolume.com,'249.246.167.88',2001-04-03 10:00:23 -blue,31,Jacqueline,jlittleu@domainmarket.com,'127.181.97.47',1986-02-11 21:35:50 -blue,32,Earl,eortizv@opera.com,'166.47.248.240',1996-07-06 08:16:27 -blue,33,Juan,jgordonw@sciencedirect.com,'71.77.2.200',1987-01-31 03:46:44 -blue,34,Diane,dhowellx@nyu.edu,'140.94.133.12',1994-06-11 02:30:05 -blue,35,Randy,rkennedyy@microsoft.com,'73.255.34.196',2005-05-26 20:28:39 -blue,36,Janice,jriveraz@time.com,'22.214.227.32',1990-02-09 04:16:52 -blue,37,Laura,lperry10@diigo.com,'159.148.145.73',2015-03-17 05:59:25 -blue,38,Gary,gray11@statcounter.com,'40.193.124.56',1970-01-27 10:04:51 -blue,39,Jesse,jmcdonald12@typepad.com,'31.7.86.103',2009-03-14 08:14:29 -blue,40,Sandra,sgonzalez13@goodreads.com,'223.80.168.239',1993-05-21 14:08:54 -blue,41,Scott,smoore14@archive.org,'38.238.46.83',1980-08-30 11:16:56 -blue,42,Phillip,pevans15@cisco.com,'158.234.59.34',2011-12-15 23:26:31 -blue,43,Steven,sriley16@google.ca,'90.247.57.68',2011-10-29 19:03:28 -blue,44,Deborah,dbrown17@hexun.com,'179.125.143.240',1995-04-10 14:36:07 -blue,45,Lori,lross18@ow.ly,'64.80.162.180',1980-12-27 16:49:15 -blue,46,Sean,sjackson19@tumblr.com,'240.116.183.69',1988-06-12 21:24:45 -blue,47,Terry,tbarnes1a@163.com,'118.38.213.137',1997-09-22 16:43:19 -blue,48,Dorothy,dross1b@ebay.com,'116.81.76.49',2005-02-28 13:33:24 -blue,49,Samuel,swashington1c@house.gov,'38.191.253.40',1989-01-19 21:15:48 -blue,50,Ralph,rcarter1d@tinyurl.com,'104.84.60.174',2007-08-11 10:21:49 -green,51,Wayne,whudson1e@princeton.edu,'90.61.24.102',1983-07-03 16:58:12 -green,52,Rose,rjames1f@plala.or.jp,'240.83.81.10',1995-06-08 11:46:23 -green,53,Louise,lcox1g@theglobeandmail.com,'105.11.82.145',2016-09-19 14:45:51 -green,54,Kenneth,kjohnson1h@independent.co.uk,'139.5.45.94',1976-08-17 11:26:19 -green,55,Donna,dbrown1i@amazon.co.uk,'19.45.169.45',2006-05-27 16:51:40 -green,56,Johnny,jvasquez1j@trellian.com,'118.202.238.23',1975-11-17 08:42:32 -green,57,Patrick,pramirez1k@tamu.edu,'231.25.153.198',1997-08-06 11:51:09 -green,58,Helen,hlarson1l@prweb.com,'8.40.21.39',1993-08-04 19:53:40 -green,59,Patricia,pspencer1m@gmpg.org,'212.198.40.15',1977-08-03 16:37:27 -green,60,Joseph,jspencer1n@marriott.com,'13.15.63.238',2005-07-23 20:22:06 -green,61,Phillip,pschmidt1o@blogtalkradio.com,'177.98.201.190',1976-05-19 21:47:44 -green,62,Joan,jwebb1p@google.ru,'105.229.170.71',1972-09-07 17:53:47 -green,63,Phyllis,pkennedy1q@imgur.com,'35.145.8.244',2000-01-01 22:33:37 -green,64,Katherine,khunter1r@smh.com.au,'248.168.205.32',1991-01-09 06:40:24 -green,65,Laura,lvasquez1s@wiley.com,'128.129.115.152',1997-10-23 12:04:56 -green,66,Juan,jdunn1t@state.gov,'44.228.124.51',2004-11-10 05:07:35 -green,67,Judith,jholmes1u@wiley.com,'40.227.179.115',1977-08-02 17:01:45 -green,68,Beverly,bbaker1v@wufoo.com,'208.34.84.59',2016-03-06 20:07:23 -green,69,Lawrence,lcarr1w@flickr.com,'59.158.212.223',1988-09-13 06:07:21 -green,70,Gloria,gwilliams1x@mtv.com,'245.231.88.33',1995-03-18 22:32:46 -green,71,Steven,ssims1y@cbslocal.com,'104.50.58.255',2001-08-05 21:26:20 -green,72,Betty,bmills1z@arstechnica.com,'103.177.214.220',1981-12-14 21:26:54 -green,73,Mildred,mfuller20@prnewswire.com,'151.158.8.130',2000-04-19 10:13:55 -green,74,Donald,dday21@icq.com,'9.178.102.255',1972-12-03 00:58:24 -green,75,Eric,ethomas22@addtoany.com,'85.2.241.227',1992-11-01 05:59:30 -green,76,Joyce,jarmstrong23@sitemeter.com,'169.224.20.36',1985-10-24 06:50:01 -green,77,Maria,mmartinez24@amazonaws.com,'143.189.167.135',2005-10-05 05:17:42 -green,78,Harry,hburton25@youtube.com,'156.47.176.237',1978-03-26 05:53:33 -green,79,Kevin,klawrence26@hao123.com,'79.136.183.83',1994-10-12 04:38:52 -green,80,David,dhall27@prweb.com,'133.149.172.153',1976-12-15 16:24:24 -green,81,Kathy,kperry28@twitter.com,'229.242.72.228',1979-03-04 02:58:56 -green,82,Adam,aprice29@elegantthemes.com,'13.145.21.10',1982-11-07 11:46:59 -green,83,Brandon,bgriffin2a@va.gov,'73.249.128.212',2013-10-30 05:30:36 -green,84,Henry,hnguyen2b@discovery.com,'211.36.214.242',1985-01-09 06:37:27 -green,85,Eric,esanchez2c@edublogs.org,'191.166.188.251',2004-05-01 23:21:42 -green,86,Jason,jlee2d@jimdo.com,'193.92.16.182',1973-01-08 09:05:39 -green,87,Diana,drichards2e@istockphoto.com,'19.130.175.245',1994-10-05 22:50:49 -green,88,Andrea,awelch2f@abc.net.au,'94.155.233.96',2002-04-26 08:41:44 -green,89,Louis,lwagner2g@miitbeian.gov.cn,'26.217.34.111',2003-08-25 07:56:39 -green,90,Jane,jsims2h@seesaa.net,'43.4.220.135',1987-03-20 20:39:04 -green,91,Larry,lgrant2i@si.edu,'97.126.79.34',2000-09-07 20:26:19 -green,92,Louis,ldean2j@prnewswire.com,'37.148.40.127',2011-09-16 20:12:14 -green,93,Jennifer,jcampbell2k@xing.com,'38.106.254.142',1988-07-15 05:06:49 -green,94,Wayne,wcunningham2l@google.com.hk,'223.28.26.187',2009-12-15 06:16:54 -green,95,Lori,lstevens2m@icq.com,'181.250.181.58',1984-10-28 03:29:19 -green,96,Judy,jsimpson2n@marriott.com,'180.121.239.219',1986-02-07 15:18:10 -green,97,Phillip,phoward2o@usa.gov,'255.247.0.175',2002-12-26 08:44:45 -green,98,Gloria,gwalker2p@usa.gov,'156.140.7.128',1997-10-04 07:58:58 -green,99,Paul,pjohnson2q@umn.edu,'183.59.198.197',1991-11-14 12:33:55 -green,100,Frank,fgreene2r@blogspot.com,'150.143.68.121',2010-06-12 23:55:39 -""" - -models__schema_yml = """ -version: 2 -sources: - - name: my_source - overrides: localdep - schema: "{{ target.schema }}" - database: "{{ target.database }}" - freshness: - error_after: {count: 3, period: day} - tables: - - name: my_table - freshness: null - identifier: my_real_seed - # on the override, the "color" column is only unique, it can be null! - columns: - - name: id - data_tests: - - not_null - - unique - - name: color - data_tests: - - unique - - name: my_other_table - freshness: null - identifier: my_real_other_seed - - name: snapshot_freshness - identifier: snapshot_freshness_base - loaded_at_field: updated_at - freshness: - error_after: {count: 1, period: day} - -""" - -seeds__expected_result_csv = """letter,color -c,cyan -m,magenta -y,yellow -k,key -""" - -seeds__my_real_other_seed_csv = """id,letter -1,c -2,m -3,y -4,k -""" - -seeds__my_real_seed_csv = """id,color -1,cyan -2,magenta -3,yellow -4,key -5,NULL -""" - - -@pytest.fixture(scope="class") -def local_dependency(): - return { - "dbt_project.yml": local_dependency__dbt_project_yml, - "models": { - "schema.yml": local_dependency__models__schema_yml, - "my_model.sql": local_dependency__models__my_model_sql, - }, - "seeds": { - "my_other_seed.csv": local_dependency__seeds__my_other_seed_csv, - "my_seed.csv": local_dependency__seeds__my_seed_csv, - "keep": { - "never_fresh.csv": local_dependency__seeds__keep__never_fresh_csv, - "snapshot_freshness_base.csv": local_dependency__seeds__keep__snapshot_freshness_base_csv, - }, - }, - } diff --git a/tests/functional/source_overrides/test_simple_source_override.py b/tests/functional/source_overrides/test_simple_source_override.py deleted file mode 100644 index d1cd3352e..000000000 --- a/tests/functional/source_overrides/test_simple_source_override.py +++ /dev/null @@ -1,146 +0,0 @@ -from datetime import datetime, timedelta - -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import check_relations_equal, run_dbt, update_config_file -import pytest - -from tests.functional.source_overrides.fixtures import ( - local_dependency, - models__schema_yml, - seeds__expected_result_csv, - seeds__my_real_other_seed_csv, - seeds__my_real_seed_csv, -) - - -class TestSourceOverride: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root, local_dependency): # noqa: F811 - write_project_files(project_root, "local_dependency", local_dependency) - - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": models__schema_yml} - - @pytest.fixture(scope="class") - def seeds(self): - return { - "expected_result.csv": seeds__expected_result_csv, - "my_real_other_seed.csv": seeds__my_real_other_seed_csv, - "my_real_seed.csv": seeds__my_real_seed_csv, - } - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "local": "local_dependency", - }, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": { - "localdep": { - "enabled": False, - "keep": { - "enabled": True, - }, - }, - "quote_columns": False, - }, - "sources": { - "localdep": { - "my_other_source": { - "enabled": False, - } - } - }, - } - - def _set_updated_at_to(self, insert_id, delta, project): - insert_time = datetime.utcnow() + delta - timestr = insert_time.strftime("%Y-%m-%d %H:%M:%S") - # favorite_color,id,first_name,email,ip_address,updated_at - - quoted_columns = ",".join( - project.adapter.quote(c) - for c in ("favorite_color", "id", "first_name", "email", "ip_address", "updated_at") - ) - - kwargs = { - "schema": project.test_schema, - "time": timestr, - "id": insert_id, - "source": project.adapter.quote("snapshot_freshness_base"), - "quoted_columns": quoted_columns, - } - - raw_code = """INSERT INTO {schema}.{source} - ({quoted_columns}) - VALUES ( - 'blue',{id},'Jake','abc@example.com','192.168.1.1','{time}' - )""".format( - **kwargs - ) - - project.run_sql(raw_code) - - return insert_id + 1 - - def test_source_overrides(self, project): - insert_id = 101 - - run_dbt(["deps"]) - - seed_results = run_dbt(["seed"]) - assert len(seed_results) == 5 - - # There should be 7, as we disabled 1 test of the original 8 - test_results = run_dbt(["test"]) - assert len(test_results) == 7 - - results = run_dbt(["run"]) - assert len(results) == 1 - - check_relations_equal(project.adapter, ["expected_result", "my_model"]) - - # set the updated_at field of this seed to last week - insert_id = self._set_updated_at_to(insert_id, timedelta(days=-7), project) - # if snapshot-freshness fails, freshness just didn't happen! - results = run_dbt(["source", "snapshot-freshness"], expect_pass=False) - # we disabled my_other_source, so we only run the one freshness check - # in - assert len(results) == 1 - # If snapshot-freshness passes, that means error_after was - # applied from the source override but not the source table override - insert_id = self._set_updated_at_to(insert_id, timedelta(days=-2), project) - results = run_dbt( - ["source", "snapshot-freshness"], - expect_pass=False, - ) - assert len(results) == 1 - - insert_id = self._set_updated_at_to(insert_id, timedelta(hours=-12), project) - results = run_dbt(["source", "snapshot-freshness"], expect_pass=True) - assert len(results) == 1 - - # update source to be enabled - new_source_config = { - "sources": { - "localdep": { - "my_other_source": { - "enabled": True, - } - } - } - } - update_config_file(new_source_config, project.project_root, "dbt_project.yml") - - # enable my_other_source, snapshot freshness should fail due to the new - # not-fresh source - results = run_dbt(["source", "snapshot-freshness"], expect_pass=False) - assert len(results) == 2 diff --git a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py deleted file mode 100644 index 0a9ab0d8d..000000000 --- a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py +++ /dev/null @@ -1,68 +0,0 @@ -import os - -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.source_overrides.fixtures import ( - dupe_models__schema1_yml, - dupe_models__schema2_yml, - local_dependency, -) - - -class TestSourceOverrideDuplicates: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root, local_dependency): # noqa: F811 - write_project_files(project_root, "local_dependency", local_dependency) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema2.yml": dupe_models__schema2_yml, - "schema1.yml": dupe_models__schema1_yml, - } - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "local": "local_dependency", - }, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": { - "localdep": { - "enabled": False, - "keep": { - "enabled": True, - }, - }, - "quote_columns": False, - }, - "sources": { - "localdep": { - "my_other_source": { - "enabled": False, - } - } - }, - } - - def test_source_duplicate_overrides(self, project): - run_dbt(["deps"]) - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - - assert "dbt found two schema.yml entries for the same source named" in str(exc.value) - assert "one of these files" in str(exc.value) - schema1_path = os.path.join("models", "schema1.yml") - schema2_path = os.path.join("models", "schema2.yml") - assert schema1_path in str(exc.value) - assert schema2_path in str(exc.value) diff --git a/tests/functional/test_connection_manager.py b/tests/functional/test_connection_manager.py index 778b2fbbd..b44181ffd 100644 --- a/tests/functional/test_connection_manager.py +++ b/tests/functional/test_connection_manager.py @@ -1,6 +1,7 @@ from unittest import TestCase, mock -from dbt.adapters.contracts.connection import Connection +from dbt.adapters.contracts.connection import Connection, Identifier +from dbt_common.helper_types import Port import psycopg2 from dbt.adapters.postgres import PostgresCredentials, PostgresConnectionManager @@ -20,13 +21,13 @@ def get_connection(self) -> Connection: credentials = PostgresCredentials( host="localhost", user="test-user", - port=1111, + port=Port(1111), password="test-password", database="test-db", schema="test-schema", retries=2, ) - connection = Connection("postgres", None, credentials) + connection = Connection(Identifier("postgres"), None, credentials) return connection def test_open(self): diff --git a/tests/functional/test_dbt_runner.py b/tests/functional/test_dbt_runner.py index c1e05f0fa..d3db2d20b 100644 --- a/tests/functional/test_dbt_runner.py +++ b/tests/functional/test_dbt_runner.py @@ -13,21 +13,21 @@ def dbt(self) -> dbtRunner: def test_group_invalid_option(self, dbt: dbtRunner) -> None: res = dbt.invoke(["--invalid-option"]) - assert type(res.exception) == DbtUsageException + assert isinstance(res.exception, DbtUsageException) def test_command_invalid_option(self, dbt: dbtRunner) -> None: res = dbt.invoke(["deps", "--invalid-option"]) - assert type(res.exception) == DbtUsageException + assert isinstance(res.exception, DbtUsageException) def test_command_mutually_exclusive_option(self, dbt: dbtRunner) -> None: res = dbt.invoke(["--warn-error", "--warn-error-options", '{"include": "all"}', "deps"]) - assert type(res.exception) == DbtUsageException + assert isinstance(res.exception, DbtUsageException) res = dbt.invoke(["deps", "--warn-error", "--warn-error-options", '{"include": "all"}']) - assert type(res.exception) == DbtUsageException + assert isinstance(res.exception, DbtUsageException) def test_invalid_command(self, dbt: dbtRunner) -> None: res = dbt.invoke(["invalid-command"]) - assert type(res.exception) == DbtUsageException + assert isinstance(res.exception, DbtUsageException) def test_invoke_version(self, dbt: dbtRunner) -> None: dbt.invoke(["--version"]) @@ -57,14 +57,14 @@ def test_invoke_kwargs(self, project, dbt): def test_invoke_kwargs_project_dir(self, project, dbt): res = dbt.invoke(["run"], project_dir="some_random_project_dir") - assert type(res.exception) == DbtProjectError + assert isinstance(res.exception, DbtProjectError) msg = "No dbt_project.yml found at expected path some_random_project_dir" assert msg in res.exception.msg def test_invoke_kwargs_profiles_dir(self, project, dbt): res = dbt.invoke(["run"], profiles_dir="some_random_profiles_dir") - assert type(res.exception) == DbtProjectError + assert isinstance(res.exception, DbtProjectError) msg = "Could not find profile named 'test'" assert msg in res.exception.msg diff --git a/tests/functional/test_init.py b/tests/functional/test_init.py index 1c3133c84..1c8202b74 100644 --- a/tests/functional/test_init.py +++ b/tests/functional/test_init.py @@ -6,7 +6,6 @@ from dbt_common.exceptions import DbtRuntimeError from dbt.tests.util import run_dbt import pytest -import yaml class TestInitProjectWithExistingProfilesYml: diff --git a/tests/unit/test_connection.py b/tests/unit/test_connection.py index 0f46d2858..d98c1e5f4 100644 --- a/tests/unit/test_connection.py +++ b/tests/unit/test_connection.py @@ -2,7 +2,7 @@ from unittest import TestCase, mock import pytest -from dbt.context.manifest import generate_query_header_context +from dbt.context.query_header import generate_query_header_context from dbt.context.providers import generate_runtime_macro_context from dbt.contracts.files import FileHash from dbt.contracts.graph.manifest import ManifestStateCheck @@ -167,7 +167,7 @@ def test_quoting_on_rename(self): @pytest.mark.skip( """ We moved from __version__ to __about__ when establishing `hatch` as our build tool. - However, `adapters.factory.register_adapter` assumes __version__ when determining + However, `adapters.factory.register_adapter` assumes __version__ when determining the adapter version. This test causes an import error """ ) @@ -183,7 +183,7 @@ def test_debug_connection_fail_nopass(self): @pytest.mark.skip( """ We moved from __version__ to __about__ when establishing `hatch` as our build tool. - However, `adapters.factory.register_adapter` assumes __version__ when determining + However, `adapters.factory.register_adapter` assumes __version__ when determining the adapter version. This test causes an import error """ ) diff --git a/tests/unit/test_renamed_relations.py b/tests/unit/test_renamed_relations.py index 49900d8ef..29bbabf2c 100644 --- a/tests/unit/test_renamed_relations.py +++ b/tests/unit/test_renamed_relations.py @@ -9,8 +9,10 @@ def test_renameable_relation(): identifier="my_table", type=RelationType.Table, ) - assert relation.renameable_relations == frozenset({ - RelationType.View, - RelationType.Table, - RelationType.MaterializedView, - }) + assert relation.renameable_relations == frozenset( + { + RelationType.View, + RelationType.Table, + RelationType.MaterializedView, + } + ) From 118fbbdc26376dd13cf09b43b31e5eba4235bbe8 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Fri, 19 Apr 2024 11:45:17 -0600 Subject: [PATCH 058/114] [CT-3267] [Feature] Debug log when `type_code` fails to convert to a `data_type` (#39) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: colin-rogers-dbt <111200756+colin-rogers-dbt@users.noreply.github.com> Co-authored-by: Mike Alfare <mike.alfare@dbtlabs.com> --- .changes/unreleased/Features-20240323-160222.yaml | 6 ++++++ dbt/adapters/postgres/connections.py | 3 +++ .../contracts/test_nonstandard_data_type.py | 15 ++++++++++----- 3 files changed, 19 insertions(+), 5 deletions(-) create mode 100644 .changes/unreleased/Features-20240323-160222.yaml diff --git a/.changes/unreleased/Features-20240323-160222.yaml b/.changes/unreleased/Features-20240323-160222.yaml new file mode 100644 index 000000000..c5af1acad --- /dev/null +++ b/.changes/unreleased/Features-20240323-160222.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Debug log when `type_code` fails to convert to a `data_type` +time: 2024-03-23T16:02:22.153674-06:00 +custom: + Author: dbeatty10 + Issue: "8912" diff --git a/dbt/adapters/postgres/connections.py b/dbt/adapters/postgres/connections.py index 741f57019..83f269579 100644 --- a/dbt/adapters/postgres/connections.py +++ b/dbt/adapters/postgres/connections.py @@ -4,8 +4,10 @@ from dbt.adapters.contracts.connection import AdapterResponse, Credentials from dbt.adapters.events.logging import AdapterLogger +from dbt.adapters.events.types import TypeCodeNotFound from dbt.adapters.sql import SQLConnectionManager from dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError +from dbt_common.events.functions import warn_or_error from dbt_common.helper_types import Port from mashumaro.jsonschema.annotations import Maximum, Minimum import psycopg2 @@ -203,4 +205,5 @@ def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: if type_code in psycopg2.extensions.string_types: return psycopg2.extensions.string_types[type_code].name else: + warn_or_error(TypeCodeNotFound(type_code=type_code)) return f"unknown type_code {type_code}" diff --git a/tests/functional/contracts/test_nonstandard_data_type.py b/tests/functional/contracts/test_nonstandard_data_type.py index ee48bb3cd..e563f4d1f 100644 --- a/tests/functional/contracts/test_nonstandard_data_type.py +++ b/tests/functional/contracts/test_nonstandard_data_type.py @@ -1,6 +1,6 @@ import pytest -from tests.functional.utils import run_dbt, run_dbt_and_capture +from tests.functional.utils import run_dbt_and_capture my_numeric_model_sql = """ @@ -45,7 +45,9 @@ def models(self): } def test_nonstandard_data_type(self, project): - run_dbt(["run"], expect_pass=True) + expected_debug_msg = "The `type_code` 790 was not recognized" + _, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=True) + assert expected_debug_msg in logs class TestModelContractUnrecognizedTypeCodeActualMismatch: @@ -58,8 +60,10 @@ def models(self): def test_nonstandard_data_type(self, project): expected_msg = "unknown type_code 790 | DECIMAL | data type mismatch" - _, logs = run_dbt_and_capture(["run"], expect_pass=False) + expected_debug_msg = "The `type_code` 790 was not recognized" + _, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=False) assert expected_msg in logs + assert expected_debug_msg in logs class TestModelContractUnrecognizedTypeCodeExpectedMismatch: @@ -72,6 +76,7 @@ def models(self): def test_nonstandard_data_type(self, project): expected_msg = "DECIMAL | unknown type_code 790 | data type mismatch" - _, logs = run_dbt_and_capture(["run"], expect_pass=False) - print(logs) + expected_debug_msg = "The `type_code` 790 was not recognized" + _, logs = run_dbt_and_capture(["--debug", "run"], expect_pass=False) assert expected_msg in logs + assert expected_debug_msg in logs From 3f0091cece5d9b2474fda151840bc3fdde339068 Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Wed, 24 Apr 2024 11:28:31 -0500 Subject: [PATCH 059/114] Materialized Views not updating cache (#69) --- .../unreleased/Fixes-20240423-180916.yaml | 6 ++ dbt/adapters/postgres/relation.py | 1 - dbt/include/postgres/macros/adapters.sql | 2 +- .../test_postgres_materialized_view.py | 63 +++++++++++++++++++ tests/unit/test_renamed_relations.py | 1 - 5 files changed, 70 insertions(+), 3 deletions(-) create mode 100644 .changes/unreleased/Fixes-20240423-180916.yaml create mode 100644 tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py diff --git a/.changes/unreleased/Fixes-20240423-180916.yaml b/.changes/unreleased/Fixes-20240423-180916.yaml new file mode 100644 index 000000000..48015bcb8 --- /dev/null +++ b/.changes/unreleased/Fixes-20240423-180916.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: remove materialized views from renambeable relation and remove a quote +time: 2024-04-23T18:09:16.865258-05:00 +custom: + Author: McKnight-42 + Issue: "127" diff --git a/dbt/adapters/postgres/relation.py b/dbt/adapters/postgres/relation.py index 05d552376..e546eab4b 100644 --- a/dbt/adapters/postgres/relation.py +++ b/dbt/adapters/postgres/relation.py @@ -25,7 +25,6 @@ class PostgresRelation(BaseRelation): { RelationType.View, RelationType.Table, - RelationType.MaterializedView, } ) ) diff --git a/dbt/include/postgres/macros/adapters.sql b/dbt/include/postgres/macros/adapters.sql index ee864e9b7..294443be2 100644 --- a/dbt/include/postgres/macros/adapters.sql +++ b/dbt/include/postgres/macros/adapters.sql @@ -39,7 +39,7 @@ on {{ relation }} {% if index_config.type -%} using {{ index_config.type }} {%- endif %} - ({{ comma_separated_columns }}); + ({{ comma_separated_columns }}) {%- endmacro %} {% macro postgres__create_schema(relation) -%} diff --git a/tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py b/tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py new file mode 100644 index 000000000..4ee44baea --- /dev/null +++ b/tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py @@ -0,0 +1,63 @@ +import pytest +from dbt.tests.util import run_dbt + +SEED = """ +order_id,customer_id,total_amount,order_date +1,101,50.00,2024-04-01 +2,102,75.00,2024-04-02 +3,103,100.00,2024-04-03 +4,101,30.00,2024-04-04 +5,104,45.00,2024-04-05 +""".strip() + +ORDERS = """ +-- models/orders.sql +{{ + config( + materialized='materialized_view' + ) +}} +SELECT + order_id, + customer_id, + total_amount, + order_date +FROM + {{ ref('source_orders') }} +""" + +PRODUCT_SALES = """ +{{ + config( + materialized='materialized_view' + ) +}} +SELECT + order_id, + SUM(total_amount) AS total_sales_amount +FROM + {{ ref('orders') }} +GROUP BY + order_id +""" + + +class TestPostgresTestRefreshMaterializedView: + """ + this test addresses a issue in postgres around materialized views, + and renaming against a model who has dependent models that are also materialized views + related pr: https://github.com/dbt-labs/dbt-core/pull/9959 + """ + + @pytest.fixture(scope="class") + def models(self): + yield {"orders.sql": ORDERS, "product_sales.sql": PRODUCT_SALES} + + @pytest.fixture(scope="class") + def seeds(self): + yield {"source_orders.csv": SEED} + + def test_postgres_refresh_dependent_naterialized_views(self, project): + run_dbt(["seed"]) + run_dbt(["run", "--full-refresh"]) + run_dbt(["run", "--full-refresh"]) diff --git a/tests/unit/test_renamed_relations.py b/tests/unit/test_renamed_relations.py index 29bbabf2c..5d2bcb04d 100644 --- a/tests/unit/test_renamed_relations.py +++ b/tests/unit/test_renamed_relations.py @@ -13,6 +13,5 @@ def test_renameable_relation(): { RelationType.View, RelationType.Table, - RelationType.MaterializedView, } ) From 6a1897e6b00a60ae8c9ad762453b3a31ff244d2d Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 25 Apr 2024 10:23:02 -0400 Subject: [PATCH 060/114] Pin `macos` test runners to `macos-12` (#71) --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 4bb423fae..1aefb7f5e 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -108,7 +108,7 @@ jobs: matrix: scenario: - {platform: ubuntu-latest, psycopg2-name: psycopg2} - - {platform: macos-latest, psycopg2-name: psycopg2-binary} + - {platform: macos-12, psycopg2-name: psycopg2-binary} steps: - name: "Check out repository" uses: actions/checkout@v4 From cf100560e7712dcc387e286057ffeeccb23a944e Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 25 Apr 2024 18:26:46 -0400 Subject: [PATCH 061/114] [Bug] Use a list instead of a set for index changes to perserve order (#73) Co-authored-by: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> --- .../unreleased/Fixes-20240425-133401.yaml | 6 ++ dbt/adapters/postgres/relation.py | 20 ++++--- .../relation_configs/materialized_view.py | 4 +- tests/unit/test_materialized_view.py | 60 +++++++++++++++++++ 4 files changed, 80 insertions(+), 10 deletions(-) create mode 100644 .changes/unreleased/Fixes-20240425-133401.yaml create mode 100644 tests/unit/test_materialized_view.py diff --git a/.changes/unreleased/Fixes-20240425-133401.yaml b/.changes/unreleased/Fixes-20240425-133401.yaml new file mode 100644 index 000000000..cb6d14da3 --- /dev/null +++ b/.changes/unreleased/Fixes-20240425-133401.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Replace usage of `Set` with `List` to fix issue with index updates intermittently happening out of order +time: 2024-04-25T13:34:01.018399-04:00 +custom: + Author: mikealfare + Issue: "72" diff --git a/dbt/adapters/postgres/relation.py b/dbt/adapters/postgres/relation.py index e546eab4b..e8128c462 100644 --- a/dbt/adapters/postgres/relation.py +++ b/dbt/adapters/postgres/relation.py @@ -1,5 +1,5 @@ from dataclasses import dataclass, field -from typing import FrozenSet, Optional, Set +from typing import FrozenSet, List, Optional from dbt.adapters.base.relation import BaseRelation from dbt.adapters.contracts.relation import RelationConfig, RelationType @@ -79,7 +79,7 @@ def _get_index_config_changes( self, existing_indexes: FrozenSet[PostgresIndexConfig], new_indexes: FrozenSet[PostgresIndexConfig], - ) -> Set[PostgresIndexConfigChange]: + ) -> List[PostgresIndexConfigChange]: """ Get the index updates that will occur as a result of a new run @@ -90,18 +90,22 @@ def _get_index_config_changes( 3. Index is old -> drop these 4. Indexes are not equal -> drop old, create new -> two actions - Returns: a set of index updates in the form {"action": "drop/create", "context": <IndexConfig>} + *Note:* + The order of the operations matters here because if the same index is dropped and recreated + (e.g. via --full-refresh) then we need to drop it first, then create it. + + Returns: an ordered list of index updates in the form {"action": "drop/create", "context": <IndexConfig>} """ - drop_changes = set( + drop_changes = [ PostgresIndexConfigChange.from_dict( {"action": RelationConfigChangeAction.drop, "context": index} ) for index in existing_indexes.difference(new_indexes) - ) - create_changes = set( + ] + create_changes = [ PostgresIndexConfigChange.from_dict( {"action": RelationConfigChangeAction.create, "context": index} ) for index in new_indexes.difference(existing_indexes) - ) - return set().union(drop_changes, create_changes) # type: ignore + ] + return drop_changes + create_changes diff --git a/dbt/adapters/postgres/relation_configs/materialized_view.py b/dbt/adapters/postgres/relation_configs/materialized_view.py index 3563833e2..8eaccbbfb 100644 --- a/dbt/adapters/postgres/relation_configs/materialized_view.py +++ b/dbt/adapters/postgres/relation_configs/materialized_view.py @@ -101,7 +101,7 @@ def parse_relation_results(cls, relation_results: RelationResults) -> dict: @dataclass class PostgresMaterializedViewConfigChangeCollection: - indexes: Set[PostgresIndexConfigChange] = field(default_factory=set) + indexes: List[PostgresIndexConfigChange] = field(default_factory=list) @property def requires_full_refresh(self) -> bool: @@ -109,4 +109,4 @@ def requires_full_refresh(self) -> bool: @property def has_changes(self) -> bool: - return self.indexes != set() + return self.indexes != [] diff --git a/tests/unit/test_materialized_view.py b/tests/unit/test_materialized_view.py new file mode 100644 index 000000000..dc4f822bb --- /dev/null +++ b/tests/unit/test_materialized_view.py @@ -0,0 +1,60 @@ +from copy import deepcopy + +from dbt.adapters.contracts.relation import RelationType +from dbt.adapters.relation_configs.config_change import RelationConfigChangeAction + +from dbt.adapters.postgres.relation import PostgresRelation +from dbt.adapters.postgres.relation_configs import PostgresIndexConfig + + +def test_index_config_changes(): + index_0_old = { + "name": "my_index_0", + "column_names": {"column_0"}, + "unique": True, + "method": "btree", + } + index_1_old = { + "name": "my_index_1", + "column_names": {"column_1"}, + "unique": True, + "method": "btree", + } + index_2_old = { + "name": "my_index_2", + "column_names": {"column_2"}, + "unique": True, + "method": "btree", + } + existing_indexes = frozenset( + PostgresIndexConfig.from_dict(index) for index in [index_0_old, index_1_old, index_2_old] + ) + + index_0_new = deepcopy(index_0_old) + index_2_new = deepcopy(index_2_old) + index_2_new.update(method="hash") + index_3_new = { + "name": "my_index_3", + "column_names": {"column_3"}, + "unique": True, + "method": "hash", + } + new_indexes = frozenset( + PostgresIndexConfig.from_dict(index) for index in [index_0_new, index_2_new, index_3_new] + ) + + relation = PostgresRelation.create( + database="my_database", + schema="my_schema", + identifier="my_materialized_view", + type=RelationType.MaterializedView, + ) + + index_changes = relation._get_index_config_changes(existing_indexes, new_indexes) + + assert isinstance(index_changes, list) + assert len(index_changes) == len(["drop 1", "drop 2", "create 2", "create 3"]) + assert index_changes[0].action == RelationConfigChangeAction.drop + assert index_changes[1].action == RelationConfigChangeAction.drop + assert index_changes[2].action == RelationConfigChangeAction.create + assert index_changes[3].action == RelationConfigChangeAction.create From d379e9e45c4387145da940aa41569f4cb2612700 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Mon, 29 Apr 2024 20:29:46 -0400 Subject: [PATCH 062/114] Release prep for `1.8.0b5` (#75) --- .changes/1.8.0-b5.md | 10 ++++++++++ .../Features-20240323-160222.yaml | 0 .../{unreleased => 1.8.0}/Fixes-20240423-180916.yaml | 0 .../{unreleased => 1.8.0}/Fixes-20240425-133401.yaml | 0 CHANGELOG.md | 11 +++++++++++ dbt/adapters/postgres/__version__.py | 2 +- 6 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 .changes/1.8.0-b5.md rename .changes/{unreleased => 1.8.0}/Features-20240323-160222.yaml (100%) rename .changes/{unreleased => 1.8.0}/Fixes-20240423-180916.yaml (100%) rename .changes/{unreleased => 1.8.0}/Fixes-20240425-133401.yaml (100%) diff --git a/.changes/1.8.0-b5.md b/.changes/1.8.0-b5.md new file mode 100644 index 000000000..196d68a7c --- /dev/null +++ b/.changes/1.8.0-b5.md @@ -0,0 +1,10 @@ +## dbt-postgres 1.8.0-b5 - April 29, 2024 + +### Features + +* Debug log when `type_code` fails to convert to a `data_type` + +### Fixes + +* remove materialized views from renambeable relation and remove a quote +* Replace usage of `Set` with `List` to fix issue with index updates intermittently happening out of order diff --git a/.changes/unreleased/Features-20240323-160222.yaml b/.changes/1.8.0/Features-20240323-160222.yaml similarity index 100% rename from .changes/unreleased/Features-20240323-160222.yaml rename to .changes/1.8.0/Features-20240323-160222.yaml diff --git a/.changes/unreleased/Fixes-20240423-180916.yaml b/.changes/1.8.0/Fixes-20240423-180916.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240423-180916.yaml rename to .changes/1.8.0/Fixes-20240423-180916.yaml diff --git a/.changes/unreleased/Fixes-20240425-133401.yaml b/.changes/1.8.0/Fixes-20240425-133401.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240425-133401.yaml rename to .changes/1.8.0/Fixes-20240425-133401.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index f9ef4153a..df8d5dec5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). +## dbt-postgres 1.8.0-b5 - April 29, 2024 + +### Features + +* Debug log when `type_code` fails to convert to a `data_type` + +### Fixes + +* remove materialized views from renambeable relation and remove a quote +* Replace usage of `Set` with `List` to fix issue with index updates intermittently happening out of order + ## dbt-postgres 1.8.0-b2 - April 03, 2024 ### Under the Hood diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index 6b76061fd..c904307f4 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.8.0b4" +version = "1.8.0b5" From 18b93ae3cca1d55638cc55ee8b8f72e4270d154d Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Wed, 1 May 2024 14:46:21 -0700 Subject: [PATCH 063/114] Add branch param to release job (#79) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .github/workflows/release.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 695424658..27fb9f4f3 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -10,6 +10,11 @@ on: - prod - test default: prod + ref: + description: "The ref (sha or branch name) to use" + type: string + default: "main" + required: true permissions: read-all @@ -33,6 +38,7 @@ jobs: uses: actions/checkout@v4 with: persist-credentials: false + ref: "${{ inputs.ref }}" - name: Setup `hatch` uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main From 3c90ecb850859be6d74fe5444fab9c2bbbfb5308 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Wed, 1 May 2024 15:26:48 -0700 Subject: [PATCH 064/114] Bump version to new alpha (#80) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .changes/0.0.0.md | 3 ++ .changes/1.0.8-b3.md | 9 ----- .changes/1.8.0-b2.md | 14 ------- .changes/1.8.0-b5.md | 10 ----- .../1.8.0/Dependencies-20240328-133507.yaml | 6 --- .../1.8.0/Dependencies-20240403-135902.yaml | 6 --- .changes/1.8.0/Features-20240323-160222.yaml | 6 --- .changes/1.8.0/Fixes-20240423-180916.yaml | 6 --- .changes/1.8.0/Fixes-20240425-133401.yaml | 6 --- .changes/1.8.0/Security-20240327-193942.yaml | 6 --- .../1.8.0/Under the Hood-20240226-225642.yaml | 6 --- CHANGELOG.md | 38 ++----------------- dbt/adapters/postgres/__version__.py | 2 +- 13 files changed, 7 insertions(+), 111 deletions(-) create mode 100644 .changes/0.0.0.md delete mode 100644 .changes/1.0.8-b3.md delete mode 100644 .changes/1.8.0-b2.md delete mode 100644 .changes/1.8.0-b5.md delete mode 100644 .changes/1.8.0/Dependencies-20240328-133507.yaml delete mode 100644 .changes/1.8.0/Dependencies-20240403-135902.yaml delete mode 100644 .changes/1.8.0/Features-20240323-160222.yaml delete mode 100644 .changes/1.8.0/Fixes-20240423-180916.yaml delete mode 100644 .changes/1.8.0/Fixes-20240425-133401.yaml delete mode 100644 .changes/1.8.0/Security-20240327-193942.yaml delete mode 100644 .changes/1.8.0/Under the Hood-20240226-225642.yaml diff --git a/.changes/0.0.0.md b/.changes/0.0.0.md new file mode 100644 index 000000000..660fbd3be --- /dev/null +++ b/.changes/0.0.0.md @@ -0,0 +1,3 @@ +## Previous Releases +For information on prior major and minor releases, see their changelogs: +- [1.8](https://github.com/dbt-labs/dbt-postgres/blob/1.8.latest/CHANGELOG.md) diff --git a/.changes/1.0.8-b3.md b/.changes/1.0.8-b3.md deleted file mode 100644 index d73520b5e..000000000 --- a/.changes/1.0.8-b3.md +++ /dev/null @@ -1,9 +0,0 @@ -## dbt-postgres 1.0.8-b3 - April 16, 2024 - -### Fixes - -* Determine `psycopg2` based on `platform_system` (Linux or other), remove usage of `DBT_PSYCOPG2_NAME` environment variable - -### Under the Hood - -* Update dependabot configuration to cover GHA diff --git a/.changes/1.8.0-b2.md b/.changes/1.8.0-b2.md deleted file mode 100644 index 193206ccb..000000000 --- a/.changes/1.8.0-b2.md +++ /dev/null @@ -1,14 +0,0 @@ -## dbt-postgres 1.8.0-b2 - April 03, 2024 - -### Under the Hood - -* Add unit test for transaction semantics. - -### Dependencies - -* add "no-binary" install option -* Add `dbt-core` as a dependency to preserve backwards compatibility for installation - -### Security - -* Pin `black>=24.3` in `pyproject.toml` diff --git a/.changes/1.8.0-b5.md b/.changes/1.8.0-b5.md deleted file mode 100644 index 196d68a7c..000000000 --- a/.changes/1.8.0-b5.md +++ /dev/null @@ -1,10 +0,0 @@ -## dbt-postgres 1.8.0-b5 - April 29, 2024 - -### Features - -* Debug log when `type_code` fails to convert to a `data_type` - -### Fixes - -* remove materialized views from renambeable relation and remove a quote -* Replace usage of `Set` with `List` to fix issue with index updates intermittently happening out of order diff --git a/.changes/1.8.0/Dependencies-20240328-133507.yaml b/.changes/1.8.0/Dependencies-20240328-133507.yaml deleted file mode 100644 index c7dbd3198..000000000 --- a/.changes/1.8.0/Dependencies-20240328-133507.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: add "no-binary" install option -time: 2024-03-28T13:35:07.300121-07:00 -custom: - Author: colin-rogers-dbt - Issue: "6" diff --git a/.changes/1.8.0/Dependencies-20240403-135902.yaml b/.changes/1.8.0/Dependencies-20240403-135902.yaml deleted file mode 100644 index 126b2178b..000000000 --- a/.changes/1.8.0/Dependencies-20240403-135902.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Add `dbt-core` as a dependency to preserve backwards compatibility for installation -time: 2024-04-03T13:59:02.539298-04:00 -custom: - Author: mikealfare - Issue: "44" diff --git a/.changes/1.8.0/Features-20240323-160222.yaml b/.changes/1.8.0/Features-20240323-160222.yaml deleted file mode 100644 index c5af1acad..000000000 --- a/.changes/1.8.0/Features-20240323-160222.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Debug log when `type_code` fails to convert to a `data_type` -time: 2024-03-23T16:02:22.153674-06:00 -custom: - Author: dbeatty10 - Issue: "8912" diff --git a/.changes/1.8.0/Fixes-20240423-180916.yaml b/.changes/1.8.0/Fixes-20240423-180916.yaml deleted file mode 100644 index 48015bcb8..000000000 --- a/.changes/1.8.0/Fixes-20240423-180916.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: remove materialized views from renambeable relation and remove a quote -time: 2024-04-23T18:09:16.865258-05:00 -custom: - Author: McKnight-42 - Issue: "127" diff --git a/.changes/1.8.0/Fixes-20240425-133401.yaml b/.changes/1.8.0/Fixes-20240425-133401.yaml deleted file mode 100644 index cb6d14da3..000000000 --- a/.changes/1.8.0/Fixes-20240425-133401.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Replace usage of `Set` with `List` to fix issue with index updates intermittently happening out of order -time: 2024-04-25T13:34:01.018399-04:00 -custom: - Author: mikealfare - Issue: "72" diff --git a/.changes/1.8.0/Security-20240327-193942.yaml b/.changes/1.8.0/Security-20240327-193942.yaml deleted file mode 100644 index 66dee543d..000000000 --- a/.changes/1.8.0/Security-20240327-193942.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Security -body: Pin `black>=24.3` in `pyproject.toml` -time: 2024-03-27T19:39:42.633016-04:00 -custom: - Author: mikealfare - Issue: "40" diff --git a/.changes/1.8.0/Under the Hood-20240226-225642.yaml b/.changes/1.8.0/Under the Hood-20240226-225642.yaml deleted file mode 100644 index dd5d0645e..000000000 --- a/.changes/1.8.0/Under the Hood-20240226-225642.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add unit test for transaction semantics. -time: 2024-02-26T22:56:42.202429-08:00 -custom: - Author: versusfacit - Issue: "23" diff --git a/CHANGELOG.md b/CHANGELOG.md index df8d5dec5..5beb02ea0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,38 +5,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). -## dbt-postgres 1.8.0-b5 - April 29, 2024 - -### Features - -* Debug log when `type_code` fails to convert to a `data_type` - -### Fixes - -* remove materialized views from renambeable relation and remove a quote -* Replace usage of `Set` with `List` to fix issue with index updates intermittently happening out of order - -## dbt-postgres 1.8.0-b2 - April 03, 2024 - -### Under the Hood - -* Add unit test for transaction semantics. - -### Dependencies - -* add "no-binary" install option -* Add `dbt-core` as a dependency to preserve backwards compatibility for installation - -### Security - -* Pin `black>=24.3` in `pyproject.toml` - -## dbt-postgres 1.0.8-b3 - April 16, 2024 - -### Fixes - -* Determine `psycopg2` based on `platform_system` (Linux or other), remove usage of `DBT_PSYCOPG2_NAME` environment variable - -### Under the Hood - -* Update dependabot configuration to cover GHA +## Previous Releases +For information on prior major and minor releases, see their changelogs: +- [1.8](https://github.com/dbt-labs/dbt-postgres/blob/1.8.latest/CHANGELOG.md) diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index c904307f4..6698ed64c 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.8.0b5" +version = "1.9.0a1" From 8446b977a01b1443f1ae14b78ce4d58d46f765c8 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 2 May 2024 18:29:58 -0400 Subject: [PATCH 065/114] [Tech Debt] Remove context methods test suite (#83) --- .../context_methods/first_dependency.py | 95 -------- .../context_methods/test_builtin_functions.py | 143 ------------ .../context_methods/test_cli_var_override.py | 66 ------ .../context_methods/test_cli_vars.py | 205 ------------------ .../context_methods/test_custom_env_vars.py | 36 --- .../context_methods/test_env_vars.py | 195 ----------------- .../context_methods/test_secret_env_vars.py | 185 ---------------- .../context_methods/test_var_dependency.py | 82 ------- .../test_var_in_generate_name.py | 43 ---- .../context_methods/test_yaml_functions.py | 49 ----- 10 files changed, 1099 deletions(-) delete mode 100644 tests/functional/context_methods/first_dependency.py delete mode 100644 tests/functional/context_methods/test_builtin_functions.py delete mode 100644 tests/functional/context_methods/test_cli_var_override.py delete mode 100644 tests/functional/context_methods/test_cli_vars.py delete mode 100644 tests/functional/context_methods/test_custom_env_vars.py delete mode 100644 tests/functional/context_methods/test_env_vars.py delete mode 100644 tests/functional/context_methods/test_secret_env_vars.py delete mode 100644 tests/functional/context_methods/test_var_dependency.py delete mode 100644 tests/functional/context_methods/test_var_in_generate_name.py delete mode 100644 tests/functional/context_methods/test_yaml_functions.py diff --git a/tests/functional/context_methods/first_dependency.py b/tests/functional/context_methods/first_dependency.py deleted file mode 100644 index 8e1365be9..000000000 --- a/tests/functional/context_methods/first_dependency.py +++ /dev/null @@ -1,95 +0,0 @@ -from dbt.tests.fixtures.project import write_project_files -import pytest - - -first_dependency__dbt_project_yml = """ -name: 'first_dep' -version: '1.0' -config-version: 2 - -profile: 'default' - -model-paths: ["models"] -analysis-paths: ["analyses"] -test-paths: ["tests"] -seed-paths: ["seeds"] -macro-paths: ["macros"] - -require-dbt-version: '>=0.1.0' - -target-path: "target" # directory which will store compiled SQL files -clean-targets: # directories to be removed by `dbt clean` - - "target" - - "dbt_packages" - -vars: - first_dep: - first_dep_global: 'first_dep_global_value_overridden' - test_config_root_override: 'configured_from_dependency' - test_config_package: 'configured_from_dependency' - -seeds: - quote_columns: True - -""" - -first_dependency__models__nested__first_dep_model_sql = """ -select - '{{ var("first_dep_global") }}' as first_dep_global, - '{{ var("from_root_to_first") }}' as from_root -""" - -first_dependency__seeds__first_dep_expected_csv = """first_dep_global,from_root -first_dep_global_value_overridden,root_first_value -""" - -first_dependency__models__nested__first_dep_model_var_expected_csv = """test_config_root_override,test_config_package -configured_from_root,configured_from_dependency -""" - -first_dependency__models__nested__first_dep_model_var_sql = """ -select - '{{ config.get("test_config_root_override") }}' as test_config_root_override, - '{{ config.get("test_config_package") }}' as test_config_package -""" - -first_dependency__model_var_in_config_schema = """ -models: -- name: first_dep_model - config: - test_config_root_override: "{{ var('test_config_root_override') }}" - test_config_package: "{{ var('test_config_package') }}" -""" - - -class FirstDependencyProject: - @pytest.fixture(scope="class") - def first_dependency(self, project): - first_dependency_files = { - "dbt_project.yml": first_dependency__dbt_project_yml, - "models": { - "nested": { - "first_dep_model.sql": first_dependency__models__nested__first_dep_model_sql - } - }, - "seeds": {"first_dep_expected.csv": first_dependency__seeds__first_dep_expected_csv}, - } - write_project_files(project.project_root, "first_dependency", first_dependency_files) - - -class FirstDependencyConfigProject: - @pytest.fixture(scope="class") - def first_dependency(self, project): - first_dependency_files = { - "dbt_project.yml": first_dependency__dbt_project_yml, - "models": { - "nested": { - "first_dep_model.sql": first_dependency__models__nested__first_dep_model_var_sql, - "schema.yml": first_dependency__model_var_in_config_schema, - } - }, - "seeds": { - "first_dep_expected.csv": first_dependency__models__nested__first_dep_model_var_expected_csv - }, - } - write_project_files(project.project_root, "first_dependency", first_dependency_files) diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py deleted file mode 100644 index b8a47b343..000000000 --- a/tests/functional/context_methods/test_builtin_functions.py +++ /dev/null @@ -1,143 +0,0 @@ -import json - -from dbt.tests.util import write_file -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -macros__validate_set_sql = """ -{% macro validate_set() %} - {% set set_result = set([1, 2, 2, 3, 'foo', False]) %} - {{ log("set_result: " ~ set_result) }} - {% set set_strict_result = set_strict([1, 2, 2, 3, 'foo', False]) %} - {{ log("set_strict_result: " ~ set_strict_result) }} -{% endmacro %} -""" - -macros__validate_zip_sql = """ -{% macro validate_zip() %} - {% set list_a = [1, 2] %} - {% set list_b = ['foo', 'bar'] %} - {% set zip_result = zip(list_a, list_b) | list %} - {{ log("zip_result: " ~ zip_result) }} - {% set zip_strict_result = zip_strict(list_a, list_b) | list %} - {{ log("zip_strict_result: " ~ zip_strict_result) }} -{% endmacro %} -""" - -macros__validate_invocation_sql = """ -{% macro validate_invocation(my_variable) %} - -- check a specific value - {{ log("use_colors: "~ invocation_args_dict['use_colors']) }} - -- whole dictionary (as string) - {{ log("invocation_result: "~ invocation_args_dict) }} -{% endmacro %} -""" - -macros__validate_dbt_metadata_envs_sql = """ -{% macro validate_dbt_metadata_envs() %} - {{ log("dbt_metadata_envs_result:"~ dbt_metadata_envs) }} -{% endmacro %} -""" - -models__set_exception_sql = """ -{% set set_strict_result = set_strict(1) %} -""" - -models__zip_exception_sql = """ -{% set zip_strict_result = zip_strict(1) %} -""" - - -def parse_json_logs(json_log_output): - parsed_logs = [] - for line in json_log_output.split("\n"): - try: - log = json.loads(line) - except ValueError: - continue - - parsed_logs.append(log) - - return parsed_logs - - -def find_result_in_parsed_logs(parsed_logs, result_name): - return next( - ( - item["data"]["msg"] - for item in parsed_logs - if result_name in item["data"].get("msg", "msg") - ), - False, - ) - - -class TestContextBuiltins: - @pytest.fixture(scope="class") - def macros(self): - return { - "validate_set.sql": macros__validate_set_sql, - "validate_zip.sql": macros__validate_zip_sql, - "validate_invocation.sql": macros__validate_invocation_sql, - "validate_dbt_metadata_envs.sql": macros__validate_dbt_metadata_envs_sql, - } - - def test_builtin_set_function(self, project): - _, log_output = run_dbt_and_capture(["--debug", "run-operation", "validate_set"]) - - # The order of the set isn't guaranteed so we can't check for the actual set in the logs - assert "set_result: " in log_output - assert "False" in log_output - assert "set_strict_result: " in log_output - - def test_builtin_zip_function(self, project): - _, log_output = run_dbt_and_capture(["--debug", "run-operation", "validate_zip"]) - - expected_zip = [(1, "foo"), (2, "bar")] - assert f"zip_result: {expected_zip}" in log_output - assert f"zip_strict_result: {expected_zip}" in log_output - - def test_builtin_invocation_args_dict_function(self, project): - _, log_output = run_dbt_and_capture( - [ - "--debug", - "--log-format=json", - "run-operation", - "validate_invocation", - "--args", - "{my_variable: test_variable}", - ] - ) - - parsed_logs = parse_json_logs(log_output) - use_colors = result = find_result_in_parsed_logs(parsed_logs, "use_colors") - assert use_colors == "use_colors: True" - invocation_dict = find_result_in_parsed_logs(parsed_logs, "invocation_result") - assert result - # The result should include a dictionary of all flags with values that aren't None - expected = ( - "'send_anonymous_usage_stats': False", - "'quiet': False", - "'print': True", - "'cache_selected_only': False", - "'macro': 'validate_invocation'", - "'args': {'my_variable': 'test_variable'}", - "'which': 'run-operation'", - "'indirect_selection': 'eager'", - ) - assert all(element in invocation_dict for element in expected) - - -class TestContextBuiltinExceptions: - # Assert compilation errors are raised with _strict equivalents - def test_builtin_function_exception(self, project): - write_file(models__set_exception_sql, project.project_root, "models", "raise.sql") - with pytest.raises(CompilationError): - run_dbt(["compile"]) - - write_file(models__zip_exception_sql, project.project_root, "models", "raise.sql") - with pytest.raises(CompilationError): - run_dbt(["compile"]) diff --git a/tests/functional/context_methods/test_cli_var_override.py b/tests/functional/context_methods/test_cli_var_override.py deleted file mode 100644 index 757ab521a..000000000 --- a/tests/functional/context_methods/test_cli_var_override.py +++ /dev/null @@ -1,66 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -models_override__schema_yml = """ -version: 2 -models: -- name: test_vars - columns: - - name: field - data_tests: - - accepted_values: - values: - - override -""" - -models_override__test_vars_sql = """ -select '{{ var("required") }}'::varchar as field -""" - - -# Tests that cli vars override vars set in the project config -class TestCLIVarOverride: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_override__schema_yml, - "test_vars.sql": models_override__test_vars_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "vars": { - "required": "present", - }, - } - - def test__override_vars_global(self, project): - run_dbt(["run", "--vars", "{required: override}"]) - run_dbt(["test"]) - - -# This one switches to setting a var in 'test' -class TestCLIVarOverridePorject: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_override__schema_yml, - "test_vars.sql": models_override__test_vars_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "vars": { - "test": { - "required": "present", - }, - }, - } - - def test__override_vars_project_level(self, project): - # This should be "override" - run_dbt(["run", "--vars", "{required: override}"]) - run_dbt(["test"]) diff --git a/tests/functional/context_methods/test_cli_vars.py b/tests/functional/context_methods/test_cli_vars.py deleted file mode 100644 index 8f6d6e8d5..000000000 --- a/tests/functional/context_methods/test_cli_vars.py +++ /dev/null @@ -1,205 +0,0 @@ -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import get_artifact, run_dbt, write_config_file -from dbt_common.exceptions import CompilationError, DbtRuntimeError -import pytest -import yaml - - -models_complex__schema_yml = """ -version: 2 -models: -- name: complex_model - columns: - - name: var_1 - data_tests: - - accepted_values: - values: - - abc - - name: var_2 - data_tests: - - accepted_values: - values: - - def - - name: var_3 - data_tests: - - accepted_values: - values: - - jkl -""" - -models_complex__complex_model_sql = """ -select - '{{ var("variable_1") }}'::varchar as var_1, - '{{ var("variable_2")[0] }}'::varchar as var_2, - '{{ var("variable_3")["value"] }}'::varchar as var_3 -""" - -models_simple__schema_yml = """ -version: 2 -models: -- name: simple_model - columns: - - name: simple - data_tests: - - accepted_values: - values: - - abc -""" - -models_simple__simple_model_sql = """ -select - '{{ var("simple") }}'::varchar as simple -""" - -really_simple_model_sql = """ -select 'abc' as simple -""" - - -class TestCLIVars: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_complex__schema_yml, - "complex_model.sql": models_complex__complex_model_sql, - } - - def test__cli_vars_longform(self, project): - cli_vars = { - "variable_1": "abc", - "variable_2": ["def", "ghi"], - "variable_3": {"value": "jkl"}, - } - results = run_dbt(["run", "--vars", yaml.dump(cli_vars)]) - assert len(results) == 1 - results = run_dbt(["test", "--vars", yaml.dump(cli_vars)]) - assert len(results) == 3 - - -class TestCLIVarsSimple: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_simple__schema_yml, - "simple_model.sql": models_simple__simple_model_sql, - } - - def test__cli_vars_shorthand(self, project): - results = run_dbt(["run", "--vars", "simple: abc"]) - assert len(results) == 1 - results = run_dbt(["test", "--vars", "simple: abc"]) - assert len(results) == 1 - - def test__cli_vars_longer(self, project): - results = run_dbt(["run", "--vars", "{simple: abc, unused: def}"]) - assert len(results) == 1 - results = run_dbt(["test", "--vars", "{simple: abc, unused: def}"]) - assert len(results) == 1 - run_results = get_artifact(project.project_root, "target", "run_results.json") - assert run_results["args"]["vars"] == {"simple": "abc", "unused": "def"} - - -class TestCLIVarsProfile: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_simple__schema_yml, - "simple_model.sql": really_simple_model_sql, - } - - def test_cli_vars_in_profile(self, project, dbt_profile_data): - profile = dbt_profile_data - profile["test"]["outputs"]["default"]["host"] = "{{ var('db_host') }}" - write_config_file(profile, project.profiles_dir, "profiles.yml") - with pytest.raises(DbtRuntimeError): - results = run_dbt(["run"]) - results = run_dbt(["run", "--vars", "db_host: localhost"]) - assert len(results) == 1 - - -class TestCLIVarsPackages: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root, dbt_integration_project): # noqa: F811 - write_project_files(project_root, "dbt_integration_project", dbt_integration_project) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_simple__schema_yml, - "simple_model.sql": really_simple_model_sql, - } - - @pytest.fixture(scope="class") - def packages_config(self): - return {"packages": [{"local": "dbt_integration_project"}]} - - def test_cli_vars_in_packages(self, project, packages_config): - # Run working deps and run commands - run_dbt(["deps"]) - results = run_dbt(["run"]) - assert len(results) == 1 - - # Change packages.yml to contain a var - packages = packages_config - packages["packages"][0]["local"] = "{{ var('path_to_project') }}" - write_config_file(packages, project.project_root, "packages.yml") - - # Without vars args deps fails - with pytest.raises(DbtRuntimeError): - run_dbt(["deps"]) - - # With vars arg deps succeeds - results = run_dbt(["deps", "--vars", "path_to_project: dbt_integration_project"]) - assert results is None - - -initial_selectors_yml = """ -selectors: - - name: dev_defer_snapshots - default: "{{ target.name == 'dev' | as_bool }}" - definition: - method: fqn - value: '*' - exclude: - - method: config.materialized - value: snapshot -""" - -var_selectors_yml = """ -selectors: - - name: dev_defer_snapshots - default: "{{ var('snapshot_target') == 'dev' | as_bool }}" - definition: - method: fqn - value: '*' - exclude: - - method: config.materialized - value: snapshot -""" - - -class TestCLIVarsSelectors: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_simple__schema_yml, - "simple_model.sql": really_simple_model_sql, - } - - @pytest.fixture(scope="class") - def selectors(self): - return initial_selectors_yml - - def test_vars_in_selectors(self, project): - # initially runs ok - results = run_dbt(["run"]) - assert len(results) == 1 - - # Update the selectors.yml file to have a var - write_config_file(var_selectors_yml, project.project_root, "selectors.yml") - with pytest.raises(CompilationError): - run_dbt(["run"]) - - # Var in cli_vars works - results = run_dbt(["run", "--vars", "snapshot_target: dev"]) - assert len(results) == 1 diff --git a/tests/functional/context_methods/test_custom_env_vars.py b/tests/functional/context_methods/test_custom_env_vars.py deleted file mode 100644 index 50a9b00c7..000000000 --- a/tests/functional/context_methods/test_custom_env_vars.py +++ /dev/null @@ -1,36 +0,0 @@ -import json -import os - -import pytest - -from tests.functional.utils import run_dbt_and_capture - - -def parse_json_logs(json_log_output): - parsed_logs = [] - for line in json_log_output.split("\n"): - try: - log = json.loads(line) - except ValueError: - continue - - parsed_logs.append(log) - - return parsed_logs - - -class TestCustomVarInLogs: - @pytest.fixture(scope="class", autouse=True) - def setup(self): - # on windows, python uppercases env var names because windows is case insensitive - os.environ["DBT_ENV_CUSTOM_ENV_SOME_VAR"] = "value" - yield - del os.environ["DBT_ENV_CUSTOM_ENV_SOME_VAR"] - - def test_extra_filled(self, project): - _, log_output = run_dbt_and_capture( - ["--log-format=json", "deps"], - ) - logs = parse_json_logs(log_output) - for log in logs: - assert log["info"].get("extra") == {"SOME_VAR": "value"} diff --git a/tests/functional/context_methods/test_env_vars.py b/tests/functional/context_methods/test_env_vars.py deleted file mode 100644 index 0bfbd01c4..000000000 --- a/tests/functional/context_methods/test_env_vars.py +++ /dev/null @@ -1,195 +0,0 @@ -import os - -from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_ENV_PREFIX -from dbt.tests.util import get_manifest -import pytest - -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -context_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select - - -- compile-time variables - '{{ this }}' as "this", - '{{ this.name }}' as "this.name", - '{{ this.schema }}' as "this.schema", - '{{ this.table }}' as "this.table", - - '{{ target.dbname }}' as "target.dbname", - '{{ target.host }}' as "target.host", - '{{ target.name }}' as "target.name", - '{{ target.schema }}' as "target.schema", - '{{ target.type }}' as "target.type", - '{{ target.user }}' as "target.user", - '{{ target.get("pass", "") }}' as "target.pass", -- not actually included, here to test that it is _not_ present! - {{ target.port }} as "target.port", - {{ target.threads }} as "target.threads", - - -- runtime variables - '{{ run_started_at }}' as run_started_at, - '{{ invocation_id }}' as invocation_id, - '{{ thread_id }}' as thread_id, - - '{{ env_var("DBT_TEST_ENV_VAR") }}' as env_var, - '{{ env_var("DBT_TEST_IGNORE_DEFAULT", "ignored_default_val") }}' as env_var_ignore_default, - '{{ env_var("DBT_TEST_USE_DEFAULT", "use_my_default_val") }}' as env_var_use_default, - 'secret_variable' as env_var_secret, -- make sure the value itself is scrubbed from the logs - '{{ env_var("DBT_TEST_NOT_SECRET") }}' as env_var_not_secret - -""" - - -class TestEnvVars: - @pytest.fixture(scope="class") - def models(self): - return {"context.sql": context_sql} - - @pytest.fixture(scope="class", autouse=True) - def setup(self): - os.environ["DBT_TEST_ENV_VAR"] = "1" - os.environ["DBT_TEST_USER"] = "root" - os.environ["DBT_TEST_PASS"] = "password" - os.environ[SECRET_ENV_PREFIX + "SECRET"] = "secret_variable" - os.environ["DBT_TEST_NOT_SECRET"] = "regular_variable" - os.environ["DBT_TEST_IGNORE_DEFAULT"] = "ignored_default" - yield - del os.environ["DBT_TEST_ENV_VAR"] - del os.environ["DBT_TEST_USER"] - del os.environ[SECRET_ENV_PREFIX + "SECRET"] - del os.environ["DBT_TEST_NOT_SECRET"] - del os.environ["DBT_TEST_IGNORE_DEFAULT"] - - @pytest.fixture(scope="class") - def profiles_config_update(self, unique_schema): - return { - "test": { - "outputs": { - # don't use env_var's here so the integration tests can run - # seed sql statements and the like. default target is used - "dev": { - "type": "postgres", - "threads": 1, - "host": "localhost", - "port": 5432, - "user": "root", - "pass": "password", - "dbname": "dbt", - "schema": unique_schema, - }, - "prod": { - "type": "postgres", - "threads": 1, - "host": "localhost", - "port": 5432, - # root/password - "user": "{{ env_var('DBT_TEST_USER') }}", - "pass": "{{ env_var('DBT_TEST_PASS') }}", - "dbname": "dbt", - "schema": unique_schema, - }, - }, - "target": "dev", - } - } - - def get_ctx_vars(self, project): - fields = [ - "this", - "this.name", - "this.schema", - "this.table", - "target.dbname", - "target.host", - "target.name", - "target.port", - "target.schema", - "target.threads", - "target.type", - "target.user", - "target.pass", - "run_started_at", - "invocation_id", - "thread_id", - "env_var", - ] - field_list = ", ".join(['"{}"'.format(f) for f in fields]) - query = "select {field_list} from {schema}.context".format( - field_list=field_list, schema=project.test_schema - ) - vals = project.run_sql(query, fetch="all") - ctx = dict([(k, v) for (k, v) in zip(fields, vals[0])]) - return ctx - - def test_env_vars_dev( - self, - project, - ): - results = run_dbt(["run"]) - assert len(results) == 1 - ctx = self.get_ctx_vars(project) - - manifest = get_manifest(project.project_root) - expected = { - "DBT_TEST_ENV_VAR": "1", - "DBT_TEST_NOT_SECRET": "regular_variable", - "DBT_TEST_IGNORE_DEFAULT": "ignored_default", - "DBT_TEST_USE_DEFAULT": DEFAULT_ENV_PLACEHOLDER, - } - assert manifest.env_vars == expected - - this = '"{}"."{}"."context"'.format(project.database, project.test_schema) - assert ctx["this"] == this - - assert ctx["this.name"] == "context" - assert ctx["this.schema"] == project.test_schema - assert ctx["this.table"] == "context" - - assert ctx["target.dbname"] == "dbt" - assert ctx["target.host"] == "localhost" - assert ctx["target.name"] == "dev" - assert ctx["target.port"] == 5432 - assert ctx["target.schema"] == project.test_schema - assert ctx["target.threads"] == 1 - assert ctx["target.type"] == "postgres" - assert ctx["target.user"] == "root" - assert ctx["target.pass"] == "" - - assert ctx["env_var"] == "1" - - def test_env_vars_prod(self, project): - results = run_dbt(["run", "--target", "prod"]) - assert len(results) == 1 - ctx = self.get_ctx_vars(project) - - this = '"{}"."{}"."context"'.format(project.database, project.test_schema) - assert ctx["this"] == this - - assert ctx["this.name"] == "context" - assert ctx["this.schema"] == project.test_schema - assert ctx["this.table"] == "context" - - assert ctx["target.dbname"] == "dbt" - assert ctx["target.host"] == "localhost" - assert ctx["target.name"] == "prod" - assert ctx["target.port"] == 5432 - assert ctx["target.schema"] == project.test_schema - assert ctx["target.threads"] == 1 - assert ctx["target.type"] == "postgres" - assert ctx["target.user"] == "root" - assert ctx["target.pass"] == "" - assert ctx["env_var"] == "1" - - def test_env_vars_secrets(self, project): - os.environ["DBT_DEBUG"] = "True" - _, log_output = run_dbt_and_capture(["run", "--target", "prod"]) - - assert not ("secret_variable" in log_output) - assert "regular_variable" in log_output diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py deleted file mode 100644 index a6a5537a7..000000000 --- a/tests/functional/context_methods/test_secret_env_vars.py +++ /dev/null @@ -1,185 +0,0 @@ -import os - -from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import ParsingError -from dbt.tests.util import read_file -from dbt_common.exceptions import DbtInternalError -import pytest - -from tests.functional.context_methods.first_dependency import FirstDependencyProject -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -secret_bad__context_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select - - '{{ env_var("DBT_TEST_ENV_VAR") }}' as env_var, - '{{ env_var("DBT_ENV_SECRET_SECRET") }}' as env_var_secret, -- this should raise an error! - '{{ env_var("DBT_TEST_NOT_SECRET") }}' as env_var_not_secret - -""" - - -class TestDisallowSecretModel: - @pytest.fixture(scope="class") - def models(self): - return {"context.sql": secret_bad__context_sql} - - def test_disallow_secret(self, project): - with pytest.raises(ParsingError): - run_dbt(["compile"]) - - -models__context_sql = """ -{{ - config( - materialized='table' - ) -}} - -select - - -- compile-time variables - '{{ this }}' as "this", - '{{ this.name }}' as "this.name", - '{{ this.schema }}' as "this.schema", - '{{ this.table }}' as "this.table", - - '{{ target.dbname }}' as "target.dbname", - '{{ target.host }}' as "target.host", - '{{ target.name }}' as "target.name", - '{{ target.schema }}' as "target.schema", - '{{ target.type }}' as "target.type", - '{{ target.user }}' as "target.user", - '{{ target.get("pass", "") }}' as "target.pass", -- not actually included, here to test that it is _not_ present! - {{ target.port }} as "target.port", - {{ target.threads }} as "target.threads", - - -- runtime variables - '{{ run_started_at }}' as run_started_at, - '{{ invocation_id }}' as invocation_id, - '{{ thread_id }}' as thread_id, - - '{{ env_var("DBT_TEST_ENV_VAR") }}' as env_var, - 'secret_variable' as env_var_secret, -- make sure the value itself is scrubbed from the logs - '{{ env_var("DBT_TEST_NOT_SECRET") }}' as env_var_not_secret -""" - - -class TestAllowSecretProfilePackage(FirstDependencyProject): - @pytest.fixture(scope="class", autouse=True) - def setup(self): - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" - os.environ[SECRET_ENV_PREFIX + "PASS"] = "password" - os.environ[SECRET_ENV_PREFIX + "PACKAGE"] = "first_dependency" - os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" - yield - del os.environ[SECRET_ENV_PREFIX + "USER"] - del os.environ[SECRET_ENV_PREFIX + "PASS"] - del os.environ[SECRET_ENV_PREFIX + "PACKAGE"] - del os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] - - @pytest.fixture(scope="class") - def models(self): - return {"context.sql": models__context_sql} - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - # the raw value of this secret *will* be written to lock file - "local": "{{ env_var('DBT_ENV_SECRET_PACKAGE') }}" - }, - { - # this secret env var will *not* be written to lock file - "git": "https://{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/dbt-external-tables.git" - }, - { - # this secret env var will *not* be written to lock file - "tarball": "https://{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/dbt-utils/archive/refs/tags/1.1.1.tar.gz", - "name": "dbt_utils", - }, - ] - } - - @pytest.fixture(scope="class") - def profile_target(self): - return { - "type": "postgres", - "threads": 1, - "host": "localhost", - "port": 5432, - # root/password - "user": "{{ env_var('DBT_ENV_SECRET_USER') }}", - "pass": "{{ env_var('DBT_ENV_SECRET_PASS') }}", - "dbname": "dbt", - } - - def test_allow_secrets(self, project, first_dependency): - _, log_output = run_dbt_and_capture(["deps"]) - lock_file_contents = read_file("package-lock.yml") - - # this will not be written to logs or lock file - assert not ("abc123" in log_output) - assert not ("abc123" in lock_file_contents) - assert "{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}" in lock_file_contents - - # this will be scrubbed from logs, but not from the lock file - assert not ("first_dependency" in log_output) - assert "first_dependency" in lock_file_contents - - -class TestCloneFailSecretScrubbed: - @pytest.fixture(scope="class", autouse=True) - def setup(self): - os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" - - @pytest.fixture(scope="class") - def models(self): - return {"context.sql": models__context_sql} - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://fakeuser:{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/fake-repo.git" - }, - ] - } - - def test_fail_clone_with_scrubbing(self, project): - with pytest.raises(DbtInternalError) as excinfo: - _, log_output = run_dbt_and_capture(["deps"]) - - assert "abc123" not in str(excinfo.value) - - -class TestCloneFailSecretNotRendered(TestCloneFailSecretScrubbed): - # as above, with some Jinja manipulation - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "git": "https://fakeuser:{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') | join(' ') }}@github.com/dbt-labs/fake-repo.git" - }, - ] - } - - def test_fail_clone_with_scrubbing(self, project): - with pytest.raises(DbtInternalError) as excinfo: - _, log_output = run_dbt_and_capture(["deps"]) - - # we should not see any manipulated form of the secret value (abc123) here - # we should see a manipulated form of the placeholder instead - assert "a b c 1 2 3" not in str(excinfo.value) - assert "D B T _ E N V _ S E C R E T _ G I T _ T O K E N" in str(excinfo.value) diff --git a/tests/functional/context_methods/test_var_dependency.py b/tests/functional/context_methods/test_var_dependency.py deleted file mode 100644 index a0c06db76..000000000 --- a/tests/functional/context_methods/test_var_dependency.py +++ /dev/null @@ -1,82 +0,0 @@ -from dbt.tests.util import check_relations_equal, run_dbt -import pytest - -from tests.functional.context_methods.first_dependency import ( - FirstDependencyConfigProject, - FirstDependencyProject, -) - - -dependency_seeds__root_model_expected_csv = """first_dep_global,from_root -dep_never_overridden,root_root_value -""" - -dependency_models__inside__model_sql = """ -select - '{{ var("first_dep_override") }}' as first_dep_global, - '{{ var("from_root_to_root") }}' as from_root - -""" - - -class TestVarDependencyInheritance(FirstDependencyProject): - @pytest.fixture(scope="class") - def seeds(self): - return {"root_model_expected.csv": dependency_seeds__root_model_expected_csv} - - @pytest.fixture(scope="class") - def models(self): - return {"inside": {"model.sql": dependency_models__inside__model_sql}} - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - {"local": "first_dependency"}, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "vars": { - "first_dep_override": "dep_never_overridden", - "test": { - "from_root_to_root": "root_root_value", - }, - "first_dep": { - "from_root_to_first": "root_first_value", - }, - }, - } - - def test_var_mutual_overrides_v1_conversion(self, project, first_dependency): - run_dbt(["deps"]) - assert len(run_dbt(["seed"])) == 2 - assert len(run_dbt(["run"])) == 2 - check_relations_equal(project.adapter, ["root_model_expected", "model"]) - check_relations_equal(project.adapter, ["first_dep_expected", "first_dep_model"]) - - -class TestVarConfigDependencyInheritance(FirstDependencyConfigProject): - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - {"local": "first_dependency"}, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "vars": { - "test_config_root_override": "configured_from_root", - }, - } - - def test_root_var_overrides_package_var(self, project, first_dependency): - run_dbt(["deps"]) - run_dbt(["seed"]) - assert len(run_dbt(["run"])) == 1 - check_relations_equal(project.adapter, ["first_dep_expected", "first_dep_model"]) diff --git a/tests/functional/context_methods/test_var_in_generate_name.py b/tests/functional/context_methods/test_var_in_generate_name.py deleted file mode 100644 index f36bec3a8..000000000 --- a/tests/functional/context_methods/test_var_in_generate_name.py +++ /dev/null @@ -1,43 +0,0 @@ -from dbt.tests.util import run_dbt, update_config_file -from dbt_common.exceptions import CompilationError -import pytest - - -model_sql = """ -select 1 as id -""" - -bad_generate_macros__generate_names_sql = """ -{% macro generate_schema_name(custom_schema_name, node) -%} - {% do var('somevar') %} - {% do return(dbt.generate_schema_name(custom_schema_name, node)) %} -{%- endmacro %} - -""" - - -class TestMissingVarGenerateNameMacro: - @pytest.fixture(scope="class") - def macros(self): - return {"generate_names.sql": bad_generate_macros__generate_names_sql} - - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": model_sql} - - def test_generate_schema_name_var(self, project): - # var isn't set, so generate_name macro fails - with pytest.raises(CompilationError) as excinfo: - run_dbt(["compile"]) - - assert "Required var 'somevar' not found in config" in str(excinfo.value) - - # globally scoped -- var is set at top-level - update_config_file({"vars": {"somevar": 1}}, project.project_root, "dbt_project.yml") - run_dbt(["compile"]) - - # locally scoped -- var is set in 'test' scope - update_config_file( - {"vars": {"test": {"somevar": 1}}}, project.project_root, "dbt_project.yml" - ) - run_dbt(["compile"]) diff --git a/tests/functional/context_methods/test_yaml_functions.py b/tests/functional/context_methods/test_yaml_functions.py deleted file mode 100644 index 8996abc93..000000000 --- a/tests/functional/context_methods/test_yaml_functions.py +++ /dev/null @@ -1,49 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -tests__from_yaml_sql = """ -{% set simplest = (fromyaml('a: 1') == {'a': 1}) %} -{% set nested_data %} -a: - b: - - c: 1 - d: 2 - - c: 3 - d: 4 -{% endset %} -{% set nested = (fromyaml(nested_data) == {'a': {'b': [{'c': 1, 'd': 2}, {'c': 3, 'd': 4}]}}) %} - -(select 'simplest' as name {% if simplest %}limit 0{% endif %}) -union all -(select 'nested' as name {% if nested %}limit 0{% endif %}) -""" - -tests__to_yaml_sql = """ -{% set simplest = (toyaml({'a': 1}) == 'a: 1\\n') %} -{% set default_sort = (toyaml({'b': 2, 'a': 1}) == 'b: 2\\na: 1\\n') %} -{% set unsorted = (toyaml({'b': 2, 'a': 1}, sort_keys=False) == 'b: 2\\na: 1\\n') %} -{% set sorted = (toyaml({'b': 2, 'a': 1}, sort_keys=True) == 'a: 1\\nb: 2\\n') %} -{% set default_results = (toyaml({'a': adapter}, 'failed') == 'failed') %} - -(select 'simplest' as name {% if simplest %}limit 0{% endif %}) -union all -(select 'default_sort' as name {% if default_sort %}limit 0{% endif %}) -union all -(select 'unsorted' as name {% if unsorted %}limit 0{% endif %}) -union all -(select 'sorted' as name {% if sorted %}limit 0{% endif %}) -union all -(select 'default_results' as name {% if default_results %}limit 0{% endif %}) -""" - - -class TestContextVars: - # This test has no actual models - - @pytest.fixture(scope="class") - def tests(self): - return {"from_yaml.sql": tests__from_yaml_sql, "to_yaml.sql": tests__to_yaml_sql} - - def test_json_data_tests(self, project): - assert len(run_dbt(["test"])) == 2 From f873247b0df2e51fafa8f9eb645df98804762f9b Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 8 May 2024 10:37:49 -0400 Subject: [PATCH 066/114] Create CODEOWNERS (#86) --- .github/CODEOWNERS | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..02ed72d45 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,3 @@ +# This codeowners file is used to ensure all PRs require reviews from the adapters team + +* @dbt-labs/adapters From 09518ecb4cc21ea05b216b0376b49bf32daa168b Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Thu, 9 May 2024 11:47:36 -0700 Subject: [PATCH 067/114] remove defer_state tests (#84) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- tests/functional/defer_state/fixtures.py | 424 -------- .../defer_state/test_defer_state.py | 329 ------ .../defer_state/test_group_updates.py | 108 -- .../defer_state/test_modified_state.py | 964 ------------------ .../defer_state/test_run_results_state.py | 481 --------- 5 files changed, 2306 deletions(-) delete mode 100644 tests/functional/defer_state/fixtures.py delete mode 100644 tests/functional/defer_state/test_defer_state.py delete mode 100644 tests/functional/defer_state/test_group_updates.py delete mode 100644 tests/functional/defer_state/test_modified_state.py delete mode 100644 tests/functional/defer_state/test_run_results_state.py diff --git a/tests/functional/defer_state/fixtures.py b/tests/functional/defer_state/fixtures.py deleted file mode 100644 index 8b1d3d35b..000000000 --- a/tests/functional/defer_state/fixtures.py +++ /dev/null @@ -1,424 +0,0 @@ -seed_csv = """id,name -1,Alice -2,Bob -""" - -table_model_sql = """ -{{ config(materialized='table') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} -""" - -table_model_now_view_sql = """ -{{ config(materialized='view') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} -""" - -table_model_now_incremental_sql = """ -{{ config(materialized='incremental', on_schema_change='append_new_columns') }} -select * from {{ ref('ephemeral_model') }} - --- establish a macro dependency to trigger state:modified.macros --- depends on: {{ my_macro() }} -""" - -changed_table_model_sql = """ -{{ config(materialized='table') }} -select 1 as fun -""" - -view_model_sql = """ -select * from {{ ref('seed') }} - --- establish a macro dependency that trips infinite recursion if not handled --- depends on: {{ my_infinitely_recursive_macro() }} -""" - -view_model_now_table_sql = """ -{{ config(materialized='table') }} -select * from {{ ref('seed') }} - --- establish a macro dependency that trips infinite recursion if not handled --- depends on: {{ my_infinitely_recursive_macro() }} -""" - -changed_view_model_sql = """ -select * from no.such.table -""" - -ephemeral_model_sql = """ -{{ config(materialized='ephemeral') }} -select * from {{ ref('view_model') }} -""" - -changed_ephemeral_model_sql = """ -{{ config(materialized='ephemeral') }} -select * from no.such.table -""" - -schema_yml = """ -version: 2 -models: - - name: view_model - columns: - - name: id - data_tests: - - unique: - severity: error - - not_null - - name: name -""" - -no_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: {} - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: - contract: - enforced: True - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -modified_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: - contract: - enforced: True - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: user_name - data_type: text -""" - -disabled_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: - contract: - enforced: False - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -versioned_no_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: {} - versions: - - v: 1 - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -versioned_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: - contract: - enforced: True - versions: - - v: 1 - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -versioned_modified_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: - contract: - enforced: True - versions: - - v: 1 - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: user_name - data_type: text -""" - -versioned_disabled_contract_schema_yml = """ -version: 2 -models: - - name: table_model - config: - contract: - enforced: False - versions: - - v: 1 - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -constraint_schema_yml = """ -version: 2 -models: - - name: view_model - columns: - - name: id - data_tests: - - unique: - severity: error - - not_null - - name: name - - name: table_model - config: - contract: - enforced: True - constraints: - - type: primary_key - columns: [id] - columns: - - name: id - constraints: - - type: not_null - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -modified_column_constraint_schema_yml = """ -version: 2 -models: - - name: view_model - columns: - - name: id - data_tests: - - unique: - severity: error - - not_null - - name: name - - name: table_model - config: - contract: - enforced: True - constraints: - - type: primary_key - columns: [id] - columns: - - name: id - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -modified_model_constraint_schema_yml = """ -version: 2 -models: - - name: view_model - columns: - - name: id - data_tests: - - unique: - severity: error - - not_null - - name: name - - name: table_model - config: - contract: - enforced: True - columns: - - name: id - constraints: - - type: not_null - data_type: integer - data_tests: - - unique: - severity: error - - not_null - - name: name - data_type: text -""" - -exposures_yml = """ -version: 2 -exposures: - - name: my_exposure - type: application - depends_on: - - ref('view_model') - owner: - email: test@example.com -""" - -macros_sql = """ -{% macro my_macro() %} - {% do log('in a macro' ) %} -{% endmacro %} -""" - -infinite_macros_sql = """ -{# trigger infinite recursion if not handled #} - -{% macro my_infinitely_recursive_macro() %} - {{ return(adapter.dispatch('my_infinitely_recursive_macro')()) }} -{% endmacro %} - -{% macro default__my_infinitely_recursive_macro() %} - {% if unmet_condition %} - {{ my_infinitely_recursive_macro() }} - {% else %} - {{ return('') }} - {% endif %} -{% endmacro %} -""" - -snapshot_sql = """ -{% snapshot my_cool_snapshot %} - - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['id'], - ) - }} - select * from {{ ref('view_model') }} - -{% endsnapshot %} -""" - -model_1_sql = """ -select * from {{ ref('seed') }} -""" - -modified_model_1_sql = """ -select * from {{ ref('seed') }} -order by 1 -""" - -model_2_sql = """ -select id from {{ ref('model_1') }} -""" - -modified_model_2_sql = """ -select * from {{ ref('model_1') }} -order by 1 -""" - - -group_schema_yml = """ -groups: - - name: finance - owner: - email: finance@jaffleshop.com - -models: - - name: model_1 - config: - group: finance - - name: model_2 - config: - group: finance -""" - - -group_modified_schema_yml = """ -groups: - - name: accounting - owner: - email: finance@jaffleshop.com -models: - - name: model_1 - config: - group: accounting - - name: model_2 - config: - group: accounting -""" - -group_modified_fail_schema_yml = """ -groups: - - name: finance - owner: - email: finance@jaffleshop.com -models: - - name: model_1 - config: - group: accounting - - name: model_2 - config: - group: finance -""" diff --git a/tests/functional/defer_state/test_defer_state.py b/tests/functional/defer_state/test_defer_state.py deleted file mode 100644 index 45c1d93c8..000000000 --- a/tests/functional/defer_state/test_defer_state.py +++ /dev/null @@ -1,329 +0,0 @@ -from copy import deepcopy -import json -import os -import shutil - -from dbt.contracts.results import RunStatus -from dbt.exceptions import DbtRuntimeError -from dbt.tests.util import rm_file, run_dbt, write_file -import pytest - -from tests.functional.defer_state import fixtures - - -class BaseDeferState: - @pytest.fixture(scope="class") - def models(self): - return { - "table_model.sql": fixtures.table_model_sql, - "view_model.sql": fixtures.view_model_sql, - "ephemeral_model.sql": fixtures.ephemeral_model_sql, - "schema.yml": fixtures.schema_yml, - "exposures.yml": fixtures.exposures_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "macros.sql": fixtures.macros_sql, - "infinite_macros.sql": fixtures.infinite_macros_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return { - "seed.csv": fixtures.seed_csv, - } - - @pytest.fixture(scope="class") - def snapshots(self): - return { - "snapshot.sql": fixtures.snapshot_sql, - } - - @pytest.fixture(scope="class") - def other_schema(self, unique_schema): - return unique_schema + "_other" - - @property - def project_config_update(self): - return { - "seeds": { - "test": { - "quote_columns": False, - } - } - } - - @pytest.fixture(scope="class") - def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): - outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} - outputs["default"]["schema"] = unique_schema - outputs["otherschema"]["schema"] = other_schema - return {"test": {"outputs": outputs, "target": "default"}} - - def copy_state(self, project_root): - state_path = os.path.join(project_root, "state") - if not os.path.exists(state_path): - os.makedirs(state_path) - shutil.copyfile( - f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" - ) - - def run_and_save_state(self, project_root, with_snapshot=False): - results = run_dbt(["seed"]) - assert len(results) == 1 - assert not any(r.node.deferred for r in results) - results = run_dbt(["run"]) - assert len(results) == 2 - assert not any(r.node.deferred for r in results) - results = run_dbt(["test"]) - assert len(results) == 2 - - if with_snapshot: - results = run_dbt(["snapshot"]) - assert len(results) == 1 - assert not any(r.node.deferred for r in results) - - # copy files - self.copy_state(project_root) - - -class TestDeferStateUnsupportedCommands(BaseDeferState): - def test_no_state(self, project): - # no "state" files present, snapshot fails - with pytest.raises(DbtRuntimeError): - run_dbt(["snapshot", "--state", "state", "--defer"]) - - -class TestRunCompileState(BaseDeferState): - def test_run_and_compile_defer(self, project): - self.run_and_save_state(project.project_root) - - # defer test, it succeeds - # Change directory to ensure that state directory is underneath - # project directory. - os.chdir(project.profiles_dir) - results = run_dbt(["compile", "--state", "state", "--defer"]) - assert len(results.results) == 6 - assert results.results[0].node.name == "seed" - - -class TestSnapshotState(BaseDeferState): - def test_snapshot_state_defer(self, project): - self.run_and_save_state(project.project_root) - # snapshot succeeds without --defer - run_dbt(["snapshot"]) - # copy files - self.copy_state(project.project_root) - # defer test, it succeeds - run_dbt(["snapshot", "--state", "state", "--defer"]) - # favor_state test, it succeeds - run_dbt(["snapshot", "--state", "state", "--defer", "--favor-state"]) - - -class TestRunDeferState(BaseDeferState): - def test_run_and_defer(self, project, unique_schema, other_schema): - project.create_test_schema(other_schema) - self.run_and_save_state(project.project_root) - - # test tests first, because run will change things - # no state, wrong schema, failure. - run_dbt(["test", "--target", "otherschema"], expect_pass=False) - - # test generate docs - # no state, wrong schema, empty nodes - catalog = run_dbt(["docs", "generate", "--target", "otherschema"]) - assert not catalog.nodes - - # no state, run also fails - run_dbt(["run", "--target", "otherschema"], expect_pass=False) - - # defer test, it succeeds - results = run_dbt( - ["test", "-m", "view_model+", "--state", "state", "--defer", "--target", "otherschema"] - ) - - # defer docs generate with state, catalog refers schema from the happy times - catalog = run_dbt( - [ - "docs", - "generate", - "-m", - "view_model+", - "--state", - "state", - "--defer", - "--target", - "otherschema", - ] - ) - assert "seed.test.seed" not in catalog.nodes - - # with state it should work though - results = run_dbt( - ["run", "-m", "view_model", "--state", "state", "--defer", "--target", "otherschema"] - ) - assert other_schema not in results[0].node.compiled_code - assert unique_schema in results[0].node.compiled_code - - with open("target/manifest.json") as fp: - data = json.load(fp) - assert data["nodes"]["seed.test.seed"]["deferred"] - - assert len(results) == 1 - - -class TestRunDeferStateChangedModel(BaseDeferState): - def test_run_defer_state_changed_model(self, project): - self.run_and_save_state(project.project_root) - - # change "view_model" - write_file(fixtures.changed_view_model_sql, "models", "view_model.sql") - - # the sql here is just wrong, so it should fail - run_dbt( - ["run", "-m", "view_model", "--state", "state", "--defer", "--target", "otherschema"], - expect_pass=False, - ) - # but this should work since we just use the old happy model - run_dbt( - ["run", "-m", "table_model", "--state", "state", "--defer", "--target", "otherschema"], - expect_pass=True, - ) - - # change "ephemeral_model" - write_file(fixtures.changed_ephemeral_model_sql, "models", "ephemeral_model.sql") - # this should fail because the table model refs a broken ephemeral - # model, which it should see - run_dbt( - ["run", "-m", "table_model", "--state", "state", "--defer", "--target", "otherschema"], - expect_pass=False, - ) - - -class TestRunDeferStateIFFNotExists(BaseDeferState): - def test_run_defer_iff_not_exists(self, project, unique_schema, other_schema): - project.create_test_schema(other_schema) - self.run_and_save_state(project.project_root) - - results = run_dbt(["seed", "--target", "otherschema"]) - assert len(results) == 1 - results = run_dbt(["run", "--state", "state", "--defer", "--target", "otherschema"]) - assert len(results) == 2 - - # because the seed now exists in our "other" schema, we should prefer it over the one - # available from state - assert other_schema in results[0].node.compiled_code - - # this time with --favor-state: even though the seed now exists in our "other" schema, - # we should still favor the one available from state - results = run_dbt( - ["run", "--state", "state", "--defer", "--favor-state", "--target", "otherschema"] - ) - assert len(results) == 2 - assert other_schema not in results[0].node.compiled_code - - -class TestDeferStateDeletedUpstream(BaseDeferState): - def test_run_defer_deleted_upstream(self, project, unique_schema, other_schema): - project.create_test_schema(other_schema) - self.run_and_save_state(project.project_root) - - # remove "ephemeral_model" + change "table_model" - rm_file("models", "ephemeral_model.sql") - write_file(fixtures.changed_table_model_sql, "models", "table_model.sql") - - # ephemeral_model is now gone. previously this caused a - # keyerror (dbt#2875), now it should pass - run_dbt( - ["run", "-m", "view_model", "--state", "state", "--defer", "--target", "otherschema"], - expect_pass=True, - ) - - # despite deferral, we should use models just created in our schema - results = run_dbt(["test", "--state", "state", "--defer", "--target", "otherschema"]) - assert other_schema in results[0].node.compiled_code - - # this time with --favor-state: prefer the models in the "other" schema, even though they exist in ours - run_dbt( - [ - "run", - "-m", - "view_model", - "--state", - "state", - "--defer", - "--favor-state", - "--target", - "otherschema", - ], - expect_pass=True, - ) - results = run_dbt(["test", "--state", "state", "--defer", "--favor-state"]) - assert other_schema not in results[0].node.compiled_code - - -class TestDeferStateFlag(BaseDeferState): - def test_defer_state_flag(self, project, unique_schema, other_schema): - project.create_test_schema(other_schema) - - # test that state deferral works correctly - run_dbt(["compile", "--target-path", "target_compile"]) - write_file(fixtures.view_model_now_table_sql, "models", "table_model.sql") - - results = run_dbt(["ls", "--select", "state:modified", "--state", "target_compile"]) - assert results == ["test.table_model"] - - run_dbt(["seed", "--target", "otherschema", "--target-path", "target_otherschema"]) - - # this will fail because we haven't loaded the seed in the default schema - run_dbt( - [ - "run", - "--select", - "state:modified", - "--defer", - "--state", - "target_compile", - "--favor-state", - ], - expect_pass=False, - ) - - # this will fail because we haven't passed in --state - with pytest.raises( - DbtRuntimeError, match="Got a state selector method, but no comparison manifest" - ): - run_dbt( - [ - "run", - "--select", - "state:modified", - "--defer", - "--defer-state", - "target_otherschema", - "--favor-state", - ], - expect_pass=False, - ) - - # this will succeed because we've loaded the seed in other schema and are successfully deferring to it instead - results = run_dbt( - [ - "run", - "--select", - "state:modified", - "--defer", - "--state", - "target_compile", - "--defer-state", - "target_otherschema", - "--favor-state", - ] - ) - - assert len(results.results) == 1 - assert results.results[0].status == RunStatus.Success - assert results.results[0].node.name == "table_model" - assert results.results[0].adapter_response["rows_affected"] == 2 diff --git a/tests/functional/defer_state/test_group_updates.py b/tests/functional/defer_state/test_group_updates.py deleted file mode 100644 index 5f3e8006b..000000000 --- a/tests/functional/defer_state/test_group_updates.py +++ /dev/null @@ -1,108 +0,0 @@ -import os - -from dbt.exceptions import ParsingError -from dbt.tests.util import copy_file, run_dbt, write_file -import pytest - -from tests.functional.defer_state import fixtures - - -class GroupSetup: - @pytest.fixture(scope="class") - def models(self): - return { - "model_1.sql": fixtures.model_1_sql, - "model_2.sql": fixtures.model_2_sql, - "schema.yml": fixtures.group_schema_yml, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed.csv": fixtures.seed_csv} - - def group_setup(self): - # save initial state - run_dbt(["seed"]) - results = run_dbt(["compile"]) - - # add sanity checks for first result - assert len(results) == 3 - seed_result = results[0].node - assert seed_result.unique_id == "seed.test.seed" - model_1_result = results[1].node - assert model_1_result.unique_id == "model.test.model_1" - assert model_1_result.group == "finance" - model_2_result = results[2].node - assert model_2_result.unique_id == "model.test.model_2" - assert model_2_result.group == "finance" - - -class TestFullyModifiedGroups(GroupSetup): - def test_changed_groups(self, project): - self.group_setup() - - # copy manifest.json to "state" directory - os.makedirs("state") - target_path = os.path.join(project.project_root, "target") - copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) - - # update group name, modify model so it gets picked up - write_file(fixtures.modified_model_1_sql, "models", "model_1.sql") - write_file(fixtures.modified_model_2_sql, "models", "model_2.sql") - write_file(fixtures.group_modified_schema_yml, "models", "schema.yml") - - # this test is flaky if you don't clean first before the build - run_dbt(["clean"]) - # only thing in results should be model_1 - results = run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) - - assert len(results) == 2 - model_1_result = results[0].node - assert model_1_result.unique_id == "model.test.model_1" - assert model_1_result.group == "accounting" # new group name! - model_2_result = results[1].node - assert model_2_result.unique_id == "model.test.model_2" - assert model_2_result.group == "accounting" # new group name! - - -class TestPartiallyModifiedGroups(GroupSetup): - def test_changed_groups(self, project): - self.group_setup() - - # copy manifest.json to "state" directory - os.makedirs("state") - target_path = os.path.join(project.project_root, "target") - copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) - - # update group name, modify model so it gets picked up - write_file(fixtures.modified_model_1_sql, "models", "model_1.sql") - write_file(fixtures.group_modified_schema_yml, "models", "schema.yml") - - # this test is flaky if you don't clean first before the build - run_dbt(["clean"]) - # only thing in results should be model_1 - results = run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) - - assert len(results) == 1 - model_1_result = results[0].node - assert model_1_result.unique_id == "model.test.model_1" - assert model_1_result.group == "accounting" # new group name! - - -class TestBadGroups(GroupSetup): - def test_changed_groups(self, project): - self.group_setup() - - # copy manifest.json to "state" directory - os.makedirs("state") - target_path = os.path.join(project.project_root, "target") - copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) - - # update group with invalid name, modify model so it gets picked up - write_file(fixtures.modified_model_1_sql, "models", "model_1.sql") - write_file(fixtures.group_modified_fail_schema_yml, "models", "schema.yml") - - # this test is flaky if you don't clean first before the build - run_dbt(["clean"]) - with pytest.raises(ParsingError, match="Invalid group 'accounting'"): - run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) diff --git a/tests/functional/defer_state/test_modified_state.py b/tests/functional/defer_state/test_modified_state.py deleted file mode 100644 index e108fe9f4..000000000 --- a/tests/functional/defer_state/test_modified_state.py +++ /dev/null @@ -1,964 +0,0 @@ -import os -import random -import shutil -import string - -from dbt.exceptions import ContractBreakingChangeError -from dbt.tests.util import get_manifest, update_config_file, write_file -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.defer_state import fixtures -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -class BaseModifiedState: - @pytest.fixture(scope="class") - def models(self): - return { - "table_model.sql": fixtures.table_model_sql, - "view_model.sql": fixtures.view_model_sql, - "ephemeral_model.sql": fixtures.ephemeral_model_sql, - "schema.yml": fixtures.schema_yml, - "exposures.yml": fixtures.exposures_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "macros.sql": fixtures.macros_sql, - "infinite_macros.sql": fixtures.infinite_macros_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed.csv": fixtures.seed_csv} - - @property - def project_config_update(self): - return { - "seeds": { - "test": { - "quote_columns": False, - } - } - } - - def copy_state(self): - if not os.path.exists("state"): - os.makedirs("state") - shutil.copyfile("target/manifest.json", "state/manifest.json") - - def run_and_save_state(self): - run_dbt(["seed"]) - run_dbt(["run"]) - self.copy_state() - - -class TestChangedSeedContents(BaseModifiedState): - def test_changed_seed_contents_state(self, project): - self.run_and_save_state() - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--exclude", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--select", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 1 - - # add a new row to the seed - changed_seed_contents = fixtures.seed_csv + "\n" + "3,carl" - write_file(changed_seed_contents, "seeds", "seed.csv") - - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"] - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--exclude", - "state:unmodified", - "--state", - "./state", - ] - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:unmodified", "--state", "./state"] - ) - assert len(results) == 0 - - results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt(["ls", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt(["ls", "--select", "state:unmodified", "--state", "./state"]) - assert len(results) == 6 - - results = run_dbt(["ls", "--select", "state:modified+", "--state", "./state"]) - assert len(results) == 7 - assert set(results) == { - "test.seed", - "test.table_model", - "test.view_model", - "test.ephemeral_model", - "test.not_null_view_model_id", - "test.unique_view_model_id", - "exposure:test.my_exposure", - } - - results = run_dbt(["ls", "--select", "state:unmodified+", "--state", "./state"]) - assert len(results) == 6 - assert set(results) == { - "test.table_model", - "test.view_model", - "test.ephemeral_model", - "test.not_null_view_model_id", - "test.unique_view_model_id", - "exposure:test.my_exposure", - } - - shutil.rmtree("./state") - self.copy_state() - - # make a very big seed - # assume each line is ~2 bytes + len(name) - target_size = 1 * 1024 * 1024 - line_size = 64 - num_lines = target_size // line_size - maxlines = num_lines + 4 - seed_lines = [fixtures.seed_csv] - for idx in range(4, maxlines): - value = "".join(random.choices(string.ascii_letters, k=62)) - seed_lines.append(f"{idx},{value}") - seed_contents = "\n".join(seed_lines) - write_file(seed_contents, "seeds", "seed.csv") - - # now if we run again, we should get a warning - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"] - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - with pytest.raises(CompilationError) as exc: - run_dbt( - [ - "--warn-error", - "ls", - "--resource-type", - "seed", - "--select", - "state:modified", - "--state", - "./state", - ] - ) - assert ">1MB" in str(exc.value) - - # now check if unmodified returns none - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:unmodified", "--state", "./state"] - ) - assert len(results) == 0 - - shutil.rmtree("./state") - self.copy_state() - - # once it"s in path mode, we don"t mark it as modified if it changes - write_file(seed_contents + "\n1,test", "seeds", "seed.csv") - - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--exclude", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--select", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 1 - - -class TestChangedSeedConfig(BaseModifiedState): - def test_changed_seed_config(self, project): - self.run_and_save_state() - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--exclude", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--select", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 1 - - update_config_file({"seeds": {"test": {"quote_columns": False}}}, "dbt_project.yml") - - # quoting change -> seed changed - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:modified", "--state", "./state"] - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt( - [ - "ls", - "--resource-type", - "seed", - "--exclude", - "state:unmodified", - "--state", - "./state", - ] - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "state:unmodified", "--state", "./state"] - ) - assert len(results) == 0 - - -class TestUnrenderedConfigSame(BaseModifiedState): - def test_unrendered_config_same(self, project): - self.run_and_save_state() - results = run_dbt( - ["ls", "--resource-type", "model", "--select", "state:modified", "--state", "./state"], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "model", - "--exclude", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 0 - - results = run_dbt( - [ - "ls", - "--resource-type", - "model", - "--select", - "state:unmodified", - "--state", - "./state", - ], - expect_pass=True, - ) - assert len(results) == 3 - - # although this is the default value, dbt will recognize it as a change - # for previously-unconfigured models, because it"s been explicitly set - update_config_file({"models": {"test": {"materialized": "view"}}}, "dbt_project.yml") - results = run_dbt( - ["ls", "--resource-type", "model", "--select", "state:modified", "--state", "./state"] - ) - assert len(results) == 1 - assert results[0] == "test.view_model" - - # converse of above statement - results = run_dbt( - [ - "ls", - "--resource-type", - "model", - "--exclude", - "state:unmodified", - "--state", - "./state", - ] - ) - assert len(results) == 1 - assert results[0] == "test.view_model" - - results = run_dbt( - [ - "ls", - "--resource-type", - "model", - "--select", - "state:unmodified", - "--state", - "./state", - ] - ) - assert len(results) == 2 - assert set(results) == { - "test.table_model", - "test.ephemeral_model", - } - - -class TestChangedModelContents(BaseModifiedState): - def test_changed_model_contents(self, project): - self.run_and_save_state() - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 0 - - table_model_update = """ - {{ config(materialized="table") }} - - select * from {{ ref("seed") }} - """ - - write_file(table_model_update, "models", "table_model.sql") - - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - -class TestNewMacro(BaseModifiedState): - def test_new_macro(self, project): - self.run_and_save_state() - - new_macro = """ - {% macro my_other_macro() %} - {% endmacro %} - """ - - # add a new macro to a new file - write_file(new_macro, "macros", "second_macro.sql") - - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 0 - - os.remove("macros/second_macro.sql") - # add a new macro to the existing file - with open("macros/macros.sql", "a") as fp: - fp.write(new_macro) - - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 0 - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 0 - - -class TestChangedMacroContents(BaseModifiedState): - def test_changed_macro_contents(self, project): - self.run_and_save_state() - - # modify an existing macro - updated_macro = """ - {% macro my_macro() %} - {% do log("in a macro", info=True) %} - {% endmacro %} - """ - write_file(updated_macro, "macros", "macros.sql") - - # table_model calls this macro - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 1 - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - - -class TestChangedExposure(BaseModifiedState): - def test_changed_exposure(self, project): - self.run_and_save_state() - - # add an "owner.name" to existing exposure - updated_exposure = fixtures.exposures_yml + "\n name: John Doe\n" - write_file(updated_exposure, "models", "exposures.yml") - - results = run_dbt(["run", "--models", "+state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "view_model" - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 0 - - -class TestChangedContractUnversioned(BaseModifiedState): - MODEL_UNIQUE_ID = "model.test.table_model" - CONTRACT_SCHEMA_YML = fixtures.contract_schema_yml - MODIFIED_SCHEMA_YML = fixtures.modified_contract_schema_yml - DISABLED_SCHEMA_YML = fixtures.disabled_contract_schema_yml - NO_CONTRACT_SCHEMA_YML = fixtures.no_contract_schema_yml - - def test_changed_contract(self, project): - self.run_and_save_state() - - # update contract for table_model - write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") - - # This will find the table_model node modified both through a config change - # and by a non-breaking change to contract: true - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - manifest = get_manifest(project.project_root) - model_unique_id = self.MODEL_UNIQUE_ID - model = manifest.nodes[model_unique_id] - expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} - assert model.unrendered_config == expected_unrendered_config - - # Run it again with "state:modified:contract", still finds modified due to contract: true - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - first_contract_checksum = model.contract.checksum - assert first_contract_checksum - # save a new state - self.copy_state() - - # This should raise because a column name has changed - write_file(self.MODIFIED_SCHEMA_YML, "models", "schema.yml") - results = run_dbt(["run"], expect_pass=False) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - second_contract_checksum = model.contract.checksum - # double check different contract_checksums - assert first_contract_checksum != second_contract_checksum - - _, logs = run_dbt_and_capture( - ["run", "--models", "state:modified.contract", "--state", "./state"], expect_pass=False - ) - expected_error = "This model has an enforced contract that failed." - expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" - expected_change = "Please ensure the name, data_type, and number of columns in your contract match the columns in your model's definition" - assert expected_error in logs - assert expected_warning in logs - assert expected_change in logs - - # Go back to schema file without contract. Should throw a warning. - write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") - _, logs = run_dbt_and_capture( - ["run", "--models", "state:modified.contract", "--state", "./state"] - ) - expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" - expected_change = "Contract enforcement was removed" - - # Now disable the contract. Should throw a warning - force warning into an error. - write_file(self.DISABLED_SCHEMA_YML, "models", "schema.yml") - with pytest.raises(CompilationError): - _, logs = run_dbt_and_capture( - [ - "--warn-error", - "run", - "--models", - "state:modified.contract", - "--state", - "./state", - ] - ) - expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" - expected_change = "Contract enforcement was removed" - - -class TestChangedContractVersioned(BaseModifiedState): - MODEL_UNIQUE_ID = "model.test.table_model.v1" - CONTRACT_SCHEMA_YML = fixtures.versioned_contract_schema_yml - MODIFIED_SCHEMA_YML = fixtures.versioned_modified_contract_schema_yml - DISABLED_SCHEMA_YML = fixtures.versioned_disabled_contract_schema_yml - NO_CONTRACT_SCHEMA_YML = fixtures.versioned_no_contract_schema_yml - - def test_changed_contract_versioned(self, project): - self.run_and_save_state() - - # update contract for table_model - write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") - - # This will find the table_model node modified both through a config change - # and by a non-breaking change to contract: true - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - manifest = get_manifest(project.project_root) - model_unique_id = self.MODEL_UNIQUE_ID - model = manifest.nodes[model_unique_id] - expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} - assert model.unrendered_config == expected_unrendered_config - - # Run it again with "state:modified:contract", still finds modified due to contract: true - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - first_contract_checksum = model.contract.checksum - assert first_contract_checksum - # save a new state - self.copy_state() - - # This should raise because a column name has changed - write_file(self.MODIFIED_SCHEMA_YML, "models", "schema.yml") - results = run_dbt(["run"], expect_pass=False) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - second_contract_checksum = model.contract.checksum - # double check different contract_checksums - assert first_contract_checksum != second_contract_checksum - with pytest.raises(ContractBreakingChangeError): - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - - # Go back to schema file without contract. Should raise an error. - write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") - with pytest.raises(ContractBreakingChangeError): - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - - # Now disable the contract. Should raise an error. - write_file(self.DISABLED_SCHEMA_YML, "models", "schema.yml") - with pytest.raises(ContractBreakingChangeError): - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - - -class TestChangedConstraintUnversioned(BaseModifiedState): - def test_changed_constraint(self, project): - self.run_and_save_state() - - # update constraint for table_model - write_file(fixtures.constraint_schema_yml, "models", "schema.yml") - - # This will find the table_model node modified both through adding constraint - # and by a non-breaking change to contract: true - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - manifest = get_manifest(project.project_root) - model_unique_id = "model.test.table_model" - model = manifest.nodes[model_unique_id] - expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} - assert model.unrendered_config == expected_unrendered_config - - # Run it again with "state:modified:contract", still finds modified due to contract: true - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - first_contract_checksum = model.contract.checksum - assert first_contract_checksum - # save a new state - self.copy_state() - - # This should raise because a column level constraint was removed - write_file(fixtures.modified_column_constraint_schema_yml, "models", "schema.yml") - # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - second_contract_checksum = model.contract.checksum - # double check different contract_checksums - assert first_contract_checksum != second_contract_checksum - # since the models are unversioned, they raise a warning but not an error - _, logs = run_dbt_and_capture( - ["run", "--models", "state:modified.contract", "--state", "./state"] - ) - expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" - expected_change = "Enforced column level constraints were removed" - assert expected_warning in logs - assert expected_change in logs - - # This should raise because a model level constraint was removed (primary_key on id) - write_file(fixtures.modified_model_constraint_schema_yml, "models", "schema.yml") - # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - second_contract_checksum = model.contract.checksum - # double check different contract_checksums - assert first_contract_checksum != second_contract_checksum - _, logs = run_dbt_and_capture( - ["run", "--models", "state:modified.contract", "--state", "./state"] - ) - expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" - expected_change = "Enforced model level constraints were removed" - assert expected_warning in logs - assert expected_change in logs - - -class TestChangedMaterializationConstraint(BaseModifiedState): - def test_changed_materialization(self, project): - self.run_and_save_state() - - # update constraint for table_model - write_file(fixtures.constraint_schema_yml, "models", "schema.yml") - - # This will find the table_model node modified both through adding constraint - # and by a non-breaking change to contract: true - results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - manifest = get_manifest(project.project_root) - model_unique_id = "model.test.table_model" - model = manifest.nodes[model_unique_id] - expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} - assert model.unrendered_config == expected_unrendered_config - - # Run it again with "state:modified:contract", still finds modified due to contract: true - results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - first_contract_checksum = model.contract.checksum - assert first_contract_checksum - # save a new state - self.copy_state() - - # This should raise because materialization changed from table to view - write_file(fixtures.table_model_now_view_sql, "models", "table_model.sql") - # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - second_contract_checksum = model.contract.checksum - # double check different contract_checksums - assert first_contract_checksum != second_contract_checksum - _, logs = run_dbt_and_capture( - ["run", "--models", "state:modified.contract", "--state", "./state"] - ) - expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" - expected_change = "Materialization changed with enforced constraints" - assert expected_warning in logs - assert expected_change in logs - - # This should not raise because materialization changed from table to incremental, both enforce constraints - write_file(fixtures.table_model_now_incremental_sql, "models", "table_model.sql") - # we don't have a way to know this failed unless we have a previous state to refer to, so the run succeeds - results = run_dbt(["run"]) - assert len(results) == 2 - - # This should pass because materialization changed from view to table which is the same as just adding new constraint, not breaking - write_file(fixtures.view_model_now_table_sql, "models", "view_model.sql") - write_file(fixtures.table_model_sql, "models", "table_model.sql") - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_unique_id] - second_contract_checksum = model.contract.checksum - # contract_checksums should be equal because we only save constraint related changes if the materialization is table/incremental - assert first_contract_checksum == second_contract_checksum - run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - assert len(results) == 2 - - -my_model_sql = """ -select 1 as id -""" - -modified_my_model_sql = """ --- a comment -select 1 as id -""" - -modified_my_model_non_breaking_sql = """ --- a comment -select 1 as id, 'blue' as color -""" - -my_model_yml = """ -models: - - name: my_model - latest_version: 1 - config: - contract: - enforced: true - columns: - - name: id - data_type: int - versions: - - v: 1 -""" - -modified_my_model_yml = """ -models: - - name: my_model - latest_version: 1 - config: - contract: - enforced: true - columns: - - name: id - data_type: text - versions: - - v: 1 -""" - -modified_my_model_non_breaking_yml = """ -models: - - name: my_model - latest_version: 1 - config: - contract: - enforced: true - columns: - - name: id - data_type: int - - name: color - data_type: text - versions: - - v: 1 -""" - - -class TestModifiedBodyAndContract: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "my_model.yml": my_model_yml, - } - - def copy_state(self): - if not os.path.exists("state"): - os.makedirs("state") - shutil.copyfile("target/manifest.json", "state/manifest.json") - - def test_modified_body_and_contract(self, project): - results = run_dbt(["run"]) - assert len(results) == 1 - self.copy_state() - - # Change both body and contract in a *breaking* way (= changing data_type of existing column) - write_file(modified_my_model_yml, "models", "my_model.yml") - write_file(modified_my_model_sql, "models", "my_model.sql") - - # Should raise even without specifying state:modified.contract - with pytest.raises(ContractBreakingChangeError): - results = run_dbt(["run", "-s", "state:modified", "--state", "./state"]) - - with pytest.raises(ContractBreakingChangeError): - results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) - - # Change both body and contract in a *non-breaking* way (= adding a new column) - write_file(modified_my_model_non_breaking_yml, "models", "my_model.yml") - write_file(modified_my_model_non_breaking_sql, "models", "my_model.sql") - - # Should pass - run_dbt(["run", "-s", "state:modified", "--state", "./state"]) - - # The model's contract has changed, even if non-breaking, so it should be selected by 'state:modified.contract' - results = run_dbt(["list", "-s", "state:modified.contract", "--state", "./state"]) - assert results == ["test.my_model.v1"] - - -modified_table_model_access_yml = """ -version: 2 -models: - - name: table_model - access: public -""" - - -class TestModifiedAccess(BaseModifiedState): - def test_changed_access(self, project): - self.run_and_save_state() - - # No access change - assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - - # Modify access (protected -> public) - write_file(modified_table_model_access_yml, "models", "schema.yml") - assert run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - - results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - assert results == ["test.table_model"] - - -modified_table_model_access_yml = """ -version: 2 -models: - - name: table_model - deprecation_date: 2020-01-01 -""" - - -class TestModifiedDeprecationDate(BaseModifiedState): - def test_changed_access(self, project): - self.run_and_save_state() - - # No access change - assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - - # Modify deprecation_date (None -> 2020-01-01) - write_file(modified_table_model_access_yml, "models", "schema.yml") - assert run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - - results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - assert results == ["test.table_model"] - - -modified_table_model_version_yml = """ -version: 2 -models: - - name: table_model - versions: - - v: 1 - defined_in: table_model -""" - - -class TestModifiedVersion(BaseModifiedState): - def test_changed_access(self, project): - self.run_and_save_state() - - # Change version (null -> v1) - write_file(modified_table_model_version_yml, "models", "schema.yml") - - results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - assert results == ["test.table_model.v1"] - - -table_model_latest_version_yml = """ -version: 2 -models: - - name: table_model - latest_version: 1 - versions: - - v: 1 - defined_in: table_model -""" - - -modified_table_model_latest_version_yml = """ -version: 2 -models: - - name: table_model - latest_version: 2 - versions: - - v: 1 - defined_in: table_model - - v: 2 -""" - - -class TestModifiedLatestVersion(BaseModifiedState): - def test_changed_access(self, project): - # Setup initial latest_version: 1 - write_file(table_model_latest_version_yml, "models", "schema.yml") - - self.run_and_save_state() - - # Bump latest version - write_file(fixtures.table_model_sql, "models", "table_model_v2.sql") - write_file(modified_table_model_latest_version_yml, "models", "schema.yml") - - results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) - assert results == ["test.table_model.v1", "test.table_model.v2"] diff --git a/tests/functional/defer_state/test_run_results_state.py b/tests/functional/defer_state/test_run_results_state.py deleted file mode 100644 index ae5941c7c..000000000 --- a/tests/functional/defer_state/test_run_results_state.py +++ /dev/null @@ -1,481 +0,0 @@ -import os -import shutil - -from dbt.tests.util import run_dbt, write_file -import pytest - -from tests.functional.defer_state import fixtures - - -class BaseRunResultsState: - @pytest.fixture(scope="class") - def models(self): - return { - "table_model.sql": fixtures.table_model_sql, - "view_model.sql": fixtures.view_model_sql, - "ephemeral_model.sql": fixtures.ephemeral_model_sql, - "schema.yml": fixtures.schema_yml, - "exposures.yml": fixtures.exposures_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "macros.sql": fixtures.macros_sql, - "infinite_macros.sql": fixtures.infinite_macros_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed.csv": fixtures.seed_csv} - - @property - def project_config_update(self): - return { - "seeds": { - "test": { - "quote_columns": False, - } - } - } - - def clear_state(self): - shutil.rmtree("./state") - - def copy_state(self): - if not os.path.exists("state"): - os.makedirs("state") - shutil.copyfile("target/manifest.json", "state/manifest.json") - shutil.copyfile("target/run_results.json", "state/run_results.json") - - def run_and_save_state(self): - run_dbt(["build"]) - self.copy_state() - - def rebuild_run_dbt(self, expect_pass=True): - self.clear_state() - run_dbt(["build"], expect_pass=expect_pass) - self.copy_state() - - def update_view_model_bad_sql(self): - # update view model to generate a failure case - not_unique_sql = "select * from forced_error" - write_file(not_unique_sql, "models", "view_model.sql") - - def update_view_model_failing_tests(self, with_dupes=True, with_nulls=False): - # test failure on build tests - # fail the unique test - select_1 = "select 1 as id" - select_stmts = [select_1] - if with_dupes: - select_stmts.append(select_1) - if with_nulls: - select_stmts.append("select null as id") - failing_tests_sql = " union all ".join(select_stmts) - write_file(failing_tests_sql, "models", "view_model.sql") - - def update_unique_test_severity_warn(self): - # change the unique test severity from error to warn and reuse the same view_model.sql changes above - new_config = fixtures.schema_yml.replace("error", "warn") - write_file(new_config, "models", "schema.yml") - - -class TestSeedRunResultsState(BaseRunResultsState): - def test_seed_run_results_state(self, project): - self.run_and_save_state() - self.clear_state() - run_dbt(["seed"]) - self.copy_state() - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "result:success", "--state", "./state"], - expect_pass=True, - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt(["ls", "--select", "result:success", "--state", "./state"]) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt(["ls", "--select", "result:success+", "--state", "./state"]) - assert len(results) == 7 - assert set(results) == { - "test.seed", - "test.table_model", - "test.view_model", - "test.ephemeral_model", - "test.not_null_view_model_id", - "test.unique_view_model_id", - "exposure:test.my_exposure", - } - - # add a new faulty row to the seed - changed_seed_contents = fixtures.seed_csv + "\n" + "\\\3,carl" - write_file(changed_seed_contents, "seeds", "seed.csv") - - self.clear_state() - run_dbt(["seed"], expect_pass=False) - self.copy_state() - - results = run_dbt( - ["ls", "--resource-type", "seed", "--select", "result:error", "--state", "./state"], - expect_pass=True, - ) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt(["ls", "--select", "result:error", "--state", "./state"]) - assert len(results) == 1 - assert results[0] == "test.seed" - - results = run_dbt(["ls", "--select", "result:error+", "--state", "./state"]) - assert len(results) == 7 - assert set(results) == { - "test.seed", - "test.table_model", - "test.view_model", - "test.ephemeral_model", - "test.not_null_view_model_id", - "test.unique_view_model_id", - "exposure:test.my_exposure", - } - - -class TestBuildRunResultsState(BaseRunResultsState): - def test_build_run_results_state(self, project): - self.run_and_save_state() - results = run_dbt(["build", "--select", "result:error", "--state", "./state"]) - assert len(results) == 0 - - self.update_view_model_bad_sql() - self.rebuild_run_dbt(expect_pass=False) - - results = run_dbt( - ["build", "--select", "result:error", "--state", "./state"], expect_pass=False - ) - assert len(results) == 3 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"view_model", "not_null_view_model_id", "unique_view_model_id"} - - results = run_dbt(["ls", "--select", "result:error", "--state", "./state"]) - assert len(results) == 3 - assert set(results) == { - "test.view_model", - "test.not_null_view_model_id", - "test.unique_view_model_id", - } - - results = run_dbt( - ["build", "--select", "result:error+", "--state", "./state"], expect_pass=False - ) - assert len(results) == 4 - nodes = set([elem.node.name for elem in results]) - assert nodes == { - "table_model", - "view_model", - "not_null_view_model_id", - "unique_view_model_id", - } - - results = run_dbt(["ls", "--select", "result:error+", "--state", "./state"]) - assert len(results) == 6 # includes exposure - assert set(results) == { - "test.table_model", - "test.view_model", - "test.ephemeral_model", - "test.not_null_view_model_id", - "test.unique_view_model_id", - "exposure:test.my_exposure", - } - - self.update_view_model_failing_tests() - self.rebuild_run_dbt(expect_pass=False) - - results = run_dbt( - ["build", "--select", "result:fail", "--state", "./state"], expect_pass=False - ) - assert len(results) == 1 - assert results[0].node.name == "unique_view_model_id" - - results = run_dbt(["ls", "--select", "result:fail", "--state", "./state"]) - assert len(results) == 1 - assert results[0] == "test.unique_view_model_id" - - results = run_dbt( - ["build", "--select", "result:fail+", "--state", "./state"], expect_pass=False - ) - assert len(results) == 1 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"unique_view_model_id"} - - results = run_dbt(["ls", "--select", "result:fail+", "--state", "./state"]) - assert len(results) == 1 - assert set(results) == {"test.unique_view_model_id"} - - self.update_unique_test_severity_warn() - self.rebuild_run_dbt(expect_pass=True) - - results = run_dbt( - ["build", "--select", "result:warn", "--state", "./state"], expect_pass=True - ) - assert len(results) == 1 - assert results[0].node.name == "unique_view_model_id" - - results = run_dbt(["ls", "--select", "result:warn", "--state", "./state"]) - assert len(results) == 1 - assert results[0] == "test.unique_view_model_id" - - results = run_dbt( - ["build", "--select", "result:warn+", "--state", "./state"], expect_pass=True - ) - assert len(results) == 1 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"unique_view_model_id"} - - results = run_dbt(["ls", "--select", "result:warn+", "--state", "./state"]) - assert len(results) == 1 - assert set(results) == {"test.unique_view_model_id"} - - -class TestRunRunResultsState(BaseRunResultsState): - def test_run_run_results_state(self, project): - self.run_and_save_state() - results = run_dbt( - ["run", "--select", "result:success", "--state", "./state"], expect_pass=True - ) - assert len(results) == 2 - assert results[0].node.name == "view_model" - assert results[1].node.name == "table_model" - - # clear state and rerun upstream view model to test + operator - self.clear_state() - run_dbt(["run", "--select", "view_model"], expect_pass=True) - self.copy_state() - results = run_dbt( - ["run", "--select", "result:success+", "--state", "./state"], expect_pass=True - ) - assert len(results) == 2 - assert results[0].node.name == "view_model" - assert results[1].node.name == "table_model" - - # check we are starting from a place with 0 errors - results = run_dbt(["run", "--select", "result:error", "--state", "./state"]) - assert len(results) == 0 - - self.update_view_model_bad_sql() - self.clear_state() - run_dbt(["run"], expect_pass=False) - self.copy_state() - - # test single result selector on error - results = run_dbt( - ["run", "--select", "result:error", "--state", "./state"], expect_pass=False - ) - assert len(results) == 1 - assert results[0].node.name == "view_model" - - # test + operator selection on error - results = run_dbt( - ["run", "--select", "result:error+", "--state", "./state"], expect_pass=False - ) - assert len(results) == 2 - assert results[0].node.name == "view_model" - assert results[1].node.name == "table_model" - - # single result selector on skipped. Expect this to pass becase underlying view already defined above - results = run_dbt( - ["run", "--select", "result:skipped", "--state", "./state"], expect_pass=True - ) - assert len(results) == 1 - assert results[0].node.name == "table_model" - - # add a downstream model that depends on table_model for skipped+ selector - downstream_model_sql = "select * from {{ref('table_model')}}" - write_file(downstream_model_sql, "models", "table_model_downstream.sql") - - self.clear_state() - run_dbt(["run"], expect_pass=False) - self.copy_state() - - results = run_dbt( - ["run", "--select", "result:skipped+", "--state", "./state"], expect_pass=True - ) - assert len(results) == 2 - assert results[0].node.name == "table_model" - assert results[1].node.name == "table_model_downstream" - - -class TestTestRunResultsState(BaseRunResultsState): - def test_test_run_results_state(self, project): - self.run_and_save_state() - # run passed nodes - results = run_dbt( - ["test", "--select", "result:pass", "--state", "./state"], expect_pass=True - ) - assert len(results) == 2 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"unique_view_model_id", "not_null_view_model_id"} - - # run passed nodes with + operator - results = run_dbt( - ["test", "--select", "result:pass+", "--state", "./state"], expect_pass=True - ) - assert len(results) == 2 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"unique_view_model_id", "not_null_view_model_id"} - - self.update_view_model_failing_tests() - self.rebuild_run_dbt(expect_pass=False) - - # test with failure selector - results = run_dbt( - ["test", "--select", "result:fail", "--state", "./state"], expect_pass=False - ) - assert len(results) == 1 - assert results[0].node.name == "unique_view_model_id" - - # test with failure selector and + operator - results = run_dbt( - ["test", "--select", "result:fail+", "--state", "./state"], expect_pass=False - ) - assert len(results) == 1 - assert results[0].node.name == "unique_view_model_id" - - self.update_unique_test_severity_warn() - # rebuild - expect_pass = True because we changed the error to a warning this time around - self.rebuild_run_dbt(expect_pass=True) - - # test with warn selector - results = run_dbt( - ["test", "--select", "result:warn", "--state", "./state"], expect_pass=True - ) - assert len(results) == 1 - assert results[0].node.name == "unique_view_model_id" - - # test with warn selector and + operator - results = run_dbt( - ["test", "--select", "result:warn+", "--state", "./state"], expect_pass=True - ) - assert len(results) == 1 - assert results[0].node.name == "unique_view_model_id" - - -class TestConcurrentSelectionRunResultsState(BaseRunResultsState): - def test_concurrent_selection_run_run_results_state(self, project): - self.run_and_save_state() - results = run_dbt( - ["run", "--select", "state:modified+", "result:error+", "--state", "./state"] - ) - assert len(results) == 0 - - self.update_view_model_bad_sql() - self.clear_state() - run_dbt(["run"], expect_pass=False) - self.copy_state() - - # add a new failing dbt model - bad_sql = "select * from forced_error" - write_file(bad_sql, "models", "table_model_modified_example.sql") - - results = run_dbt( - ["run", "--select", "state:modified+", "result:error+", "--state", "./state"], - expect_pass=False, - ) - assert len(results) == 3 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"view_model", "table_model_modified_example", "table_model"} - - -class TestConcurrentSelectionTestRunResultsState(BaseRunResultsState): - def test_concurrent_selection_test_run_results_state(self, project): - self.run_and_save_state() - # create failure test case for result:fail selector - self.update_view_model_failing_tests(with_nulls=True) - - # run dbt build again to trigger test errors - self.rebuild_run_dbt(expect_pass=False) - - # get the failures from - results = run_dbt( - [ - "test", - "--select", - "result:fail", - "--exclude", - "not_null_view_model_id", - "--state", - "./state", - ], - expect_pass=False, - ) - assert len(results) == 1 - nodes = set([elem.node.name for elem in results]) - assert nodes == {"unique_view_model_id"} - - -class TestConcurrentSelectionBuildRunResultsState(BaseRunResultsState): - def test_concurrent_selectors_build_run_results_state(self, project): - self.run_and_save_state() - results = run_dbt( - ["build", "--select", "state:modified+", "result:error+", "--state", "./state"] - ) - assert len(results) == 0 - - self.update_view_model_bad_sql() - self.rebuild_run_dbt(expect_pass=False) - - # add a new failing dbt model - bad_sql = "select * from forced_error" - write_file(bad_sql, "models", "table_model_modified_example.sql") - - results = run_dbt( - ["build", "--select", "state:modified+", "result:error+", "--state", "./state"], - expect_pass=False, - ) - assert len(results) == 5 - nodes = set([elem.node.name for elem in results]) - assert nodes == { - "table_model_modified_example", - "view_model", - "table_model", - "not_null_view_model_id", - "unique_view_model_id", - } - - self.update_view_model_failing_tests() - - # create error model case for result:error selector - more_bad_sql = "select 1 as id from not_exists" - write_file(more_bad_sql, "models", "error_model.sql") - - # create something downstream from the error model to rerun - downstream_model_sql = "select * from {{ ref('error_model') }} )" - write_file(downstream_model_sql, "models", "downstream_of_error_model.sql") - - # regenerate build state - self.rebuild_run_dbt(expect_pass=False) - - # modify model again to trigger the state:modified selector - bad_again_sql = "select * from forced_anothererror" - write_file(bad_again_sql, "models", "table_model_modified_example.sql") - - results = run_dbt( - [ - "build", - "--select", - "state:modified+", - "result:error+", - "result:fail+", - "--state", - "./state", - ], - expect_pass=False, - ) - assert len(results) == 4 - nodes = set([elem.node.name for elem in results]) - assert nodes == { - "error_model", - "downstream_of_error_model", - "table_model_modified_example", - "unique_view_model_id", - } From c51b214df31a1211578fa93ae2bc3753c9fbccd0 Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Fri, 10 May 2024 10:24:56 -0500 Subject: [PATCH 068/114] delete tests taht should be in core (#92) --- .../test_custom_materialization.py | 80 ------------------- 1 file changed, 80 deletions(-) delete mode 100644 tests/functional/materializations/test_custom_materialization.py diff --git a/tests/functional/materializations/test_custom_materialization.py b/tests/functional/materializations/test_custom_materialization.py deleted file mode 100644 index 6aa69a4b5..000000000 --- a/tests/functional/materializations/test_custom_materialization.py +++ /dev/null @@ -1,80 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -models__model_sql = """ -{{ config(materialized='view') }} -select 1 as id - -""" - - -@pytest.fixture(scope="class") -def models(): - return {"model.sql": models__model_sql} - - -class TestOverrideAdapterDependency: - # make sure that if there's a dependency with an adapter-specific - # materialization, we honor that materialization - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "override-view-adapter-dep"}]} - - def test_adapter_dependency(self, project, override_view_adapter_dep): - run_dbt(["deps"]) - # this should error because the override is buggy - run_dbt(["run"], expect_pass=False) - - -class TestOverrideDefaultDependency: - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "override-view-default-dep"}]} - - def test_default_dependency(self, project, override_view_default_dep): - run_dbt(["deps"]) - # this should error because the override is buggy - run_dbt(["run"], expect_pass=False) - - -class TestOverrideAdapterDependencyPassing: - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "override-view-adapter-pass-dep"}]} - - def test_default_dependency(self, project, override_view_adapter_pass_dep): - run_dbt(["deps"]) - # this should pass because the override is ok - run_dbt(["run"]) - - -class TestOverrideAdapterLocal: - # make sure that the local default wins over the dependency - # adapter-specific - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "override-view-adapter-pass-dep"}]} - - @pytest.fixture(scope="class") - def project_config_update(self): - return {"macro-paths": ["override-view-adapter-macros"]} - - def test_default_dependency( - self, project, override_view_adapter_pass_dep, override_view_adapter_macros - ): - run_dbt(["deps"]) - # this should error because the override is buggy - run_dbt(["run"], expect_pass=False) - - -class TestOverrideDefaultReturn: - @pytest.fixture(scope="class") - def project_config_update(self): - return {"macro-paths": ["override-view-return-no-relation"]} - - def test_default_dependency(self, project, override_view_return_no_relation): - run_dbt(["deps"]) - results = run_dbt(["run"], expect_pass=False) - assert "did not explicitly return a list of relations" in results[0].message From dc5c0f5acb8df8c34d4c56e385aa38cc65b4582f Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Mon, 13 May 2024 15:44:51 -0700 Subject: [PATCH 069/114] delete unneeded list command (#94) --- tests/functional/list/fixtures.py | 213 -------- tests/functional/list/test_list.py | 798 ----------------------------- 2 files changed, 1011 deletions(-) delete mode 100644 tests/functional/list/fixtures.py delete mode 100644 tests/functional/list/test_list.py diff --git a/tests/functional/list/fixtures.py b/tests/functional/list/fixtures.py deleted file mode 100644 index ae5514c62..000000000 --- a/tests/functional/list/fixtures.py +++ /dev/null @@ -1,213 +0,0 @@ -import pytest -from dbt.tests.fixtures.project import write_project_files - - -snapshots__snapshot_sql = """ -{% snapshot my_snapshot %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{database}}.{{schema}}.seed -{% endsnapshot %} - -""" - -tests__t_sql = """ -select 1 as id limit 0 - -""" - -models__schema_yml = """ -version: 2 -models: - - name: outer - description: The outer table - columns: - - name: id - description: The id value - data_tests: - - unique - - not_null - -sources: - - name: my_source - tables: - - name: my_table - -""" - -models__ephemeral_sql = """ - -{{ config(materialized='ephemeral') }} - -select - 1 as id, - {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as created_at - -""" - -models__metric_flow = """ - -select - {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as date_day - -""" - -models__incremental_sql = """ -{{ - config( - materialized = "incremental", - incremental_strategy = "delete+insert", - ) -}} - -select * from {{ ref('seed') }} - -{% if is_incremental() %} - where a > (select max(a) from {{this}}) -{% endif %} - -""" - -models__docs_md = """ -{% docs my_docs %} - some docs -{% enddocs %} - -""" - -models__outer_sql = """ -select * from {{ ref('ephemeral') }} - -""" - -models__sub__inner_sql = """ -select * from {{ ref('outer') }} - -""" - -macros__macro_stuff_sql = """ -{% macro cool_macro() %} - wow! -{% endmacro %} - -{% macro other_cool_macro(a, b) %} - cool! -{% endmacro %} - -""" - -seeds__seed_csv = """a,b -1,2 -""" - -analyses__a_sql = """ -select 4 as id - -""" - -semantic_models__sm_yml = """ -semantic_models: - - name: my_sm - model: ref('outer') - defaults: - agg_time_dimension: created_at - entities: - - name: my_entity - type: primary - expr: id - dimensions: - - name: created_at - type: time - type_params: - time_granularity: day - measures: - - name: total_outer_count - agg: count - expr: 1 - -""" - -metrics__m_yml = """ -metrics: - - name: total_outer - type: simple - description: The total count of outer - label: Total Outer - type_params: - measure: total_outer_count -""" - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots__snapshot_sql} - - -@pytest.fixture(scope="class") -def tests(): - return {"t.sql": tests__t_sql} - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "ephemeral.sql": models__ephemeral_sql, - "incremental.sql": models__incremental_sql, - "docs.md": models__docs_md, - "outer.sql": models__outer_sql, - "metricflow_time_spine.sql": models__metric_flow, - "sm.yml": semantic_models__sm_yml, - "m.yml": metrics__m_yml, - "sub": {"inner.sql": models__sub__inner_sql}, - } - - -@pytest.fixture(scope="class") -def macros(): - return {"macro_stuff.sql": macros__macro_stuff_sql} - - -@pytest.fixture(scope="class") -def seeds(): - return {"seed.csv": seeds__seed_csv} - - -@pytest.fixture(scope="class") -def analyses(): - return {"a.sql": analyses__a_sql} - - -@pytest.fixture(scope="class") -def semantic_models(): - return {"sm.yml": semantic_models__sm_yml} - - -@pytest.fixture(scope="class") -def metrics(): - return {"m.yml": metrics__m_yml} - - -@pytest.fixture(scope="class") -def project_files( - project_root, - snapshots, - tests, - models, - macros, - seeds, - analyses, -): - write_project_files(project_root, "snapshots", snapshots) - write_project_files(project_root, "tests", tests) - write_project_files(project_root, "models", models) - write_project_files(project_root, "macros", macros) - write_project_files(project_root, "seeds", seeds) - write_project_files(project_root, "analyses", analyses) diff --git a/tests/functional/list/test_list.py b/tests/functional/list/test_list.py deleted file mode 100644 index f932cba7a..000000000 --- a/tests/functional/list/test_list.py +++ /dev/null @@ -1,798 +0,0 @@ -import json -from os.path import normcase, normpath - -from dbt.logger import log_manager -from dbt.tests.util import run_dbt -import pytest - - -class TestList: - def dir(self, value): - return normpath(value) - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "analysis-paths": [self.dir("analyses")], - "snapshot-paths": [self.dir("snapshots")], - "macro-paths": [self.dir("macros")], - "seed-paths": [self.dir("seeds")], - "test-paths": [self.dir("tests")], - "seeds": { - "quote_columns": False, - }, - } - - def run_dbt_ls(self, args=None, expect_pass=True): - log_manager.stdout_console() - full_args = ["ls"] - if args is not None: - full_args += args - - result = run_dbt(args=full_args, expect_pass=expect_pass) - - log_manager.stdout_console() - return result - - def assert_json_equal(self, json_str, expected): - assert json.loads(json_str) == expected - - def expect_given_output(self, args, expectations): - for key, values in expectations.items(): - ls_result = self.run_dbt_ls(args + ["--output", key]) - if not isinstance(values, (list, tuple)): - values = [values] - assert len(ls_result) == len(values) - for got, expected in zip(ls_result, values): - if key == "json": - self.assert_json_equal(got, expected) - else: - assert got == expected - - def expect_snapshot_output(self, project): - expectations = { - "name": "my_snapshot", - "selector": "test.snapshot.my_snapshot", - "json": { - "name": "my_snapshot", - "package_name": "test", - "depends_on": {"nodes": [], "macros": []}, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "snapshot", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "target_database": project.database, - "target_schema": project.test_schema, - "unique_key": "id", - "strategy": "timestamp", - "updated_at": "updated_at", - "full_refresh": None, - "database": None, - "schema": None, - "alias": None, - "check_cols": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - }, - "unique_id": "snapshot.test.my_snapshot", - "original_file_path": normalize("snapshots/snapshot.sql"), - "alias": "my_snapshot", - "resource_type": "snapshot", - }, - "path": self.dir("snapshots/snapshot.sql"), - } - self.expect_given_output(["--resource-type", "snapshot"], expectations) - - def expect_analyses_output(self): - expectations = { - "name": "a", - "selector": "test.analysis.a", - "json": { - "name": "a", - "package_name": "test", - "depends_on": {"nodes": [], "macros": []}, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "view", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "unique_key": None, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - }, - "unique_id": "analysis.test.a", - "original_file_path": normalize("analyses/a.sql"), - "alias": "a", - "resource_type": "analysis", - }, - "path": self.dir("analyses/a.sql"), - } - self.expect_given_output(["--resource-type", "analysis"], expectations) - - def expect_model_output(self): - expectations = { - "name": ("ephemeral", "incremental", "inner", "metricflow_time_spine", "outer"), - "selector": ( - "test.ephemeral", - "test.incremental", - "test.sub.inner", - "test.metricflow_time_spine", - "test.outer", - ), - "json": ( - { - "name": "ephemeral", - "package_name": "test", - "depends_on": { - "nodes": [], - "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "ephemeral", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "unique_key": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - "access": "protected", - }, - "original_file_path": normalize("models/ephemeral.sql"), - "unique_id": "model.test.ephemeral", - "alias": "ephemeral", - "resource_type": "model", - }, - { - "name": "incremental", - "package_name": "test", - "depends_on": { - "nodes": ["seed.test.seed"], - "macros": ["macro.dbt.is_incremental"], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "incremental", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "unique_key": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": "delete+insert", - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - "access": "protected", - }, - "original_file_path": normalize("models/incremental.sql"), - "unique_id": "model.test.incremental", - "alias": "incremental", - "resource_type": "model", - }, - { - "name": "inner", - "package_name": "test", - "depends_on": { - "nodes": ["model.test.outer"], - "macros": [], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "view", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "unique_key": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - "access": "protected", - }, - "original_file_path": normalize("models/sub/inner.sql"), - "unique_id": "model.test.inner", - "alias": "inner", - "resource_type": "model", - }, - { - "name": "metricflow_time_spine", - "package_name": "test", - "depends_on": { - "nodes": [], - "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "view", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "unique_key": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - "access": "protected", - }, - "original_file_path": normalize("models/metricflow_time_spine.sql"), - "unique_id": "model.test.metricflow_time_spine", - "alias": "metricflow_time_spine", - "resource_type": "model", - }, - { - "name": "outer", - "package_name": "test", - "depends_on": { - "nodes": ["model.test.ephemeral"], - "macros": [], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "view", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "unique_key": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - "access": "protected", - }, - "original_file_path": normalize("models/outer.sql"), - "unique_id": "model.test.outer", - "alias": "outer", - "resource_type": "model", - }, - ), - "path": ( - self.dir("models/ephemeral.sql"), - self.dir("models/incremental.sql"), - self.dir("models/sub/inner.sql"), - self.dir("models/metricflow_time_spine.sql"), - self.dir("models/outer.sql"), - ), - } - self.expect_given_output(["--resource-type", "model"], expectations) - - # Do not include ephemeral model - it was not selected - def expect_model_ephemeral_output(self): - expectations = { - "name": ("outer"), - "selector": ("test.outer"), - "json": ( - { - "name": "outer", - "package_name": "test", - "depends_on": {"nodes": [], "macros": []}, - "tags": [], - "config": { - "enabled": True, - "materialized": "view", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "persist_docs": {}, - "full_refresh": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "access": "protected", - }, - "unique_id": "model.test.ephemeral", - "original_file_path": normalize("models/ephemeral.sql"), - "alias": "outer", - "resource_type": "model", - }, - ), - "path": (self.dir("models/outer.sql"),), - } - self.expect_given_output(["--model", "outer"], expectations) - - def expect_source_output(self): - expectations = { - "name": "my_source.my_table", - "selector": "source:test.my_source.my_table", - "json": { - "config": { - "enabled": True, - }, - "unique_id": "source.test.my_source.my_table", - "original_file_path": normalize("models/schema.yml"), - "package_name": "test", - "name": "my_table", - "source_name": "my_source", - "resource_type": "source", - "tags": [], - }, - "path": self.dir("models/schema.yml"), - } - # should we do this --select automatically for a user if if 'source' is - # in the resource types and there is no '--select' or '--exclude'? - self.expect_given_output( - ["--resource-type", "source", "--select", "source:*"], expectations - ) - - def expect_seed_output(self): - expectations = { - "name": "seed", - "selector": "test.seed", - "json": { - "name": "seed", - "package_name": "test", - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "seed", - "post-hook": [], - "tags": [], - "pre-hook": [], - "quoting": {}, - "column_types": {}, - "delimiter": ",", - "persist_docs": {}, - "quote_columns": False, - "full_refresh": None, - "unique_key": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - }, - "depends_on": {"macros": []}, - "unique_id": "seed.test.seed", - "original_file_path": normalize("seeds/seed.csv"), - "alias": "seed", - "resource_type": "seed", - }, - "path": self.dir("seeds/seed.csv"), - } - self.expect_given_output(["--resource-type", "seed"], expectations) - - def expect_test_output(self): - expectations = { - "name": ("not_null_outer_id", "t", "unique_outer_id"), - "selector": ("test.not_null_outer_id", "test.t", "test.unique_outer_id"), - "json": ( - { - "name": "not_null_outer_id", - "package_name": "test", - "depends_on": { - "nodes": ["model.test.outer"], - "macros": ["macro.dbt.test_not_null"], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "test", - "severity": "ERROR", - "store_failures": None, - "store_failures_as": None, - "warn_if": "!= 0", - "error_if": "!= 0", - "fail_calc": "count(*)", - "where": None, - "limit": None, - "tags": [], - "database": None, - "schema": "dbt_test__audit", - "alias": None, - "meta": {}, - }, - "unique_id": "test.test.not_null_outer_id.a226f4fb36", - "original_file_path": normalize("models/schema.yml"), - "alias": "not_null_outer_id", - "resource_type": "test", - }, - { - "name": "t", - "package_name": "test", - "depends_on": {"nodes": [], "macros": []}, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "test", - "severity": "ERROR", - "store_failures": None, - "store_failures_as": None, - "warn_if": "!= 0", - "error_if": "!= 0", - "fail_calc": "count(*)", - "where": None, - "limit": None, - "tags": [], - "database": None, - "schema": "dbt_test__audit", - "alias": None, - "meta": {}, - }, - "unique_id": "test.test.t", - "original_file_path": normalize("tests/t.sql"), - "alias": "t", - "resource_type": "test", - }, - { - "name": "unique_outer_id", - "package_name": "test", - "depends_on": { - "nodes": ["model.test.outer"], - "macros": ["macro.dbt.test_unique"], - }, - "tags": [], - "config": { - "enabled": True, - "group": None, - "materialized": "test", - "severity": "ERROR", - "store_failures": None, - "store_failures_as": None, - "warn_if": "!= 0", - "error_if": "!= 0", - "fail_calc": "count(*)", - "where": None, - "limit": None, - "tags": [], - "database": None, - "schema": "dbt_test__audit", - "alias": None, - "meta": {}, - }, - "unique_id": "test.test.unique_outer_id.2195e332d3", - "original_file_path": normalize("models/schema.yml"), - "alias": "unique_outer_id", - "resource_type": "test", - }, - ), - "path": ( - self.dir("models/schema.yml"), - self.dir("tests/t.sql"), - self.dir("models/schema.yml"), - ), - } - self.expect_given_output(["--resource-type", "test"], expectations) - - def expect_all_output(self): - # generic test FQNS include the resource + column they're defined on - # models are just package, subdirectory path, name - # sources are like models, ending in source_name.table_name - expected_default = { - "test.ephemeral", - "test.incremental", - "test.snapshot.my_snapshot", - "test.sub.inner", - "test.outer", - "test.seed", - "source:test.my_source.my_table", - "test.not_null_outer_id", - "test.unique_outer_id", - "test.metricflow_time_spine", - "test.t", - "semantic_model:test.my_sm", - "metric:test.total_outer", - } - # analyses have their type inserted into their fqn like tests - expected_all = expected_default | {"test.analysis.a"} - - results = self.run_dbt_ls(["--resource-type", "all", "--select", "*", "source:*"]) - assert set(results) == expected_all - - results = self.run_dbt_ls(["--select", "*", "source:*"]) - assert set(results) == expected_default - - results = self.run_dbt_ls(["--resource-type", "default", "--select", "*", "source:*"]) - assert set(results) == expected_default - - results = self.run_dbt_ls - - def expect_select(self): - results = self.run_dbt_ls(["--resource-type", "test", "--select", "outer"]) - assert set(results) == {"test.not_null_outer_id", "test.unique_outer_id"} - - self.run_dbt_ls(["--resource-type", "test", "--select", "inner"], expect_pass=True) - - results = self.run_dbt_ls(["--resource-type", "test", "--select", "+inner"]) - assert set(results) == {"test.not_null_outer_id", "test.unique_outer_id"} - - results = self.run_dbt_ls(["--resource-type", "semantic_model"]) - assert set(results) == {"semantic_model:test.my_sm"} - - results = self.run_dbt_ls(["--resource-type", "metric"]) - assert set(results) == {"metric:test.total_outer"} - - results = self.run_dbt_ls(["--resource-type", "model", "--select", "outer+"]) - assert set(results) == {"test.outer", "test.sub.inner"} - - results = self.run_dbt_ls(["--resource-type", "model", "--exclude", "inner"]) - assert set(results) == { - "test.ephemeral", - "test.outer", - "test.metricflow_time_spine", - "test.incremental", - } - - results = self.run_dbt_ls(["--select", "config.incremental_strategy:delete+insert"]) - assert set(results) == {"test.incremental"} - - self.run_dbt_ls( - ["--select", "config.incremental_strategy:insert_overwrite"], expect_pass=True - ) - - def expect_resource_type_multiple(self): - """Expect selected resources when --resource-type given multiple times""" - results = self.run_dbt_ls(["--resource-type", "test", "--resource-type", "model"]) - assert set(results) == { - "test.ephemeral", - "test.incremental", - "test.not_null_outer_id", - "test.outer", - "test.sub.inner", - "test.metricflow_time_spine", - "test.t", - "test.unique_outer_id", - } - - results = self.run_dbt_ls( - [ - "--resource-type", - "test", - "--resource-type", - "model", - "--exclude", - "unique_outer_id", - ] - ) - assert set(results) == { - "test.ephemeral", - "test.incremental", - "test.not_null_outer_id", - "test.outer", - "test.metricflow_time_spine", - "test.sub.inner", - "test.t", - } - - results = self.run_dbt_ls( - [ - "--resource-type", - "test", - "--resource-type", - "model", - "--select", - "+inner", - "outer+", - "--exclude", - "inner", - ] - ) - assert set(results) == { - "test.ephemeral", - "test.not_null_outer_id", - "test.unique_outer_id", - "test.outer", - } - - def expect_selected_keys(self, project): - """Expect selected fields of the the selected model""" - expectations = [ - {"database": project.database, "schema": project.test_schema, "alias": "inner"} - ] - results = self.run_dbt_ls( - [ - "--model", - "inner", - "--output", - "json", - "--output-keys", - "database", - "schema", - "alias", - ] - ) - assert len(results) == len(expectations) - - for got, expected in zip(results, expectations): - self.assert_json_equal(got, expected) - - """Expect selected fields when --output-keys given multiple times - """ - expectations = [{"database": project.database, "schema": project.test_schema}] - results = self.run_dbt_ls( - [ - "--model", - "inner", - "--output", - "json", - "--output-keys", - "database", - "--output-keys", - "schema", - ] - ) - assert len(results) == len(expectations) - - for got, expected in zip(results, expectations): - self.assert_json_equal(got, expected) - - """Expect selected fields of the test resource types - """ - expectations = [ - {"name": "not_null_outer_id", "column_name": "id"}, - {"name": "t"}, - {"name": "unique_outer_id", "column_name": "id"}, - ] - results = self.run_dbt_ls( - [ - "--resource-type", - "test", - "--output", - "json", - "--output-keys", - "name", - "column_name", - ] - ) - assert len(results) == len(expectations) - - for got, expected in zip( - sorted(results, key=lambda x: json.loads(x).get("name")), - sorted(expectations, key=lambda x: x.get("name")), - ): - self.assert_json_equal(got, expected) - - """Expect nothing (non-existent keys) for the selected models - """ - expectations = [{}, {}] - results = self.run_dbt_ls( - [ - "--model", - "inner outer", - "--output", - "json", - "--output-keys", - "non_existent_key", - ] - ) - assert len(results) == len(expectations) - - for got, expected in zip(results, expectations): - self.assert_json_equal(got, expected) - - @pytest.mark.skip("The actual is not getting loaded, so all actuals are 0.") - def test_ls(self, project): - self.expect_snapshot_output(project) - self.expect_analyses_output() - self.expect_model_output() - self.expect_source_output() - self.expect_seed_output() - self.expect_test_output() - self.expect_select() - self.expect_resource_type_multiple() - self.expect_all_output() - self.expect_selected_keys(project) - - -def normalize(path): - """On windows, neither is enough on its own: - normcase('C:\\documents/ALL CAPS/subdir\\..') - 'c:\\documents\\all caps\\subdir\\..' - normpath('C:\\documents/ALL CAPS/subdir\\..') - 'C:\\documents\\ALL CAPS' - normpath(normcase('C:\\documents/ALL CAPS/subdir\\..')) - 'c:\\documents\\all caps' - """ - return normcase(normpath(path)) From 3aba80e4e0c93d3395d681d2caf776bb24df9d41 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Mon, 13 May 2024 17:33:31 -0600 Subject: [PATCH 070/114] Cross-database `date` macro (#81) Co-authored-by: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> --- .changes/unreleased/Features-20240501-151856.yaml | 6 ++++++ tests/functional/shared_tests/test_utils.py | 5 +++++ 2 files changed, 11 insertions(+) create mode 100644 .changes/unreleased/Features-20240501-151856.yaml diff --git a/.changes/unreleased/Features-20240501-151856.yaml b/.changes/unreleased/Features-20240501-151856.yaml new file mode 100644 index 000000000..2dda71939 --- /dev/null +++ b/.changes/unreleased/Features-20240501-151856.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Cross-database `date` macro +time: 2024-05-01T15:18:56.758715-06:00 +custom: + Author: dbeatty10 + Issue: 82 diff --git a/tests/functional/shared_tests/test_utils.py b/tests/functional/shared_tests/test_utils.py index 9934a240a..3811638ad 100644 --- a/tests/functional/shared_tests/test_utils.py +++ b/tests/functional/shared_tests/test_utils.py @@ -8,6 +8,7 @@ from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText from dbt.tests.adapter.utils.test_concat import BaseConcat from dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampAware +from dbt.tests.adapter.utils.test_date import BaseDate from dbt.tests.adapter.utils.test_dateadd import BaseDateAdd from dbt.tests.adapter.utils.test_datediff import BaseDateDiff from dbt.tests.adapter.utils.test_date_spine import BaseDateSpine @@ -69,6 +70,10 @@ class TestCurrentTimestamp(BaseCurrentTimestampAware): pass +class TestDate(BaseDate): + pass + + class TestDateSpine(BaseDateSpine): pass From 862e8e9e68a9fc452110db3fd97e246fb8705bb0 Mon Sep 17 00:00:00 2001 From: Doug Beatty <44704949+dbeatty10@users.noreply.github.com> Date: Tue, 14 May 2024 13:15:09 -0600 Subject: [PATCH 071/114] Import relevant pytest(s) for cross-database `cast` macro (#77) --- .changes/unreleased/Features-20240430-185700.yaml | 6 ++++++ tests/functional/shared_tests/test_utils.py | 5 +++++ 2 files changed, 11 insertions(+) create mode 100644 .changes/unreleased/Features-20240430-185700.yaml diff --git a/.changes/unreleased/Features-20240430-185700.yaml b/.changes/unreleased/Features-20240430-185700.yaml new file mode 100644 index 000000000..638d10625 --- /dev/null +++ b/.changes/unreleased/Features-20240430-185700.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add tests for cross-database `cast` macro +time: 2024-04-30T18:57:00.437045-06:00 +custom: + Author: dbeatty10 + Issue: "76" diff --git a/tests/functional/shared_tests/test_utils.py b/tests/functional/shared_tests/test_utils.py index 3811638ad..d1a8ea1d0 100644 --- a/tests/functional/shared_tests/test_utils.py +++ b/tests/functional/shared_tests/test_utils.py @@ -5,6 +5,7 @@ from dbt.tests.adapter.utils.test_array_concat import BaseArrayConcat from dbt.tests.adapter.utils.test_array_construct import BaseArrayConstruct from dbt.tests.adapter.utils.test_bool_or import BaseBoolOr +from dbt.tests.adapter.utils.test_cast import BaseCast from dbt.tests.adapter.utils.test_cast_bool_to_text import BaseCastBoolToText from dbt.tests.adapter.utils.test_concat import BaseConcat from dbt.tests.adapter.utils.test_current_timestamp import BaseCurrentTimestampAware @@ -58,6 +59,10 @@ class TestBoolOr(BaseBoolOr): pass +class TestCast(BaseCast): + pass + + class TestCastBoolToText(BaseCastBoolToText): pass From e23d0a7bb7d4a32598778922ce28578ad4be0e77 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 14 May 2024 20:46:23 -0700 Subject: [PATCH 072/114] Fix the semicolon semantics for indexes while respecting other bug fix. (#97) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- .../unreleased/Fixes-20240514-193201.yaml | 6 +++++ .../relations/materialized_view/alter.sql | 3 ++- .../relations/materialized_view/create.sql | 2 +- tests/functional/test_multiple_indexes.py | 27 +++++++++++++++++++ 4 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 .changes/unreleased/Fixes-20240514-193201.yaml create mode 100644 tests/functional/test_multiple_indexes.py diff --git a/.changes/unreleased/Fixes-20240514-193201.yaml b/.changes/unreleased/Fixes-20240514-193201.yaml new file mode 100644 index 000000000..95ab24673 --- /dev/null +++ b/.changes/unreleased/Fixes-20240514-193201.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix the semicolon semantics for indexes while respecting other bug fix +time: 2024-05-14T19:32:01.149383-07:00 +custom: + Author: versusfacit + Issue: "85" diff --git a/dbt/include/postgres/macros/relations/materialized_view/alter.sql b/dbt/include/postgres/macros/relations/materialized_view/alter.sql index ee53c1136..429b7e53a 100644 --- a/dbt/include/postgres/macros/relations/materialized_view/alter.sql +++ b/dbt/include/postgres/macros/relations/materialized_view/alter.sql @@ -30,13 +30,14 @@ {%- if _index_change.action == "drop" -%} - {{ postgres__get_drop_index_sql(relation, _index.name) }}; + {{ postgres__get_drop_index_sql(relation, _index.name) }} {%- elif _index_change.action == "create" -%} {{ postgres__get_create_index_sql(relation, _index.as_node_config) }} {%- endif -%} + {{ ';' if not loop.last else "" }} {%- endfor -%} diff --git a/dbt/include/postgres/macros/relations/materialized_view/create.sql b/dbt/include/postgres/macros/relations/materialized_view/create.sql index 17e5cb064..89c18234b 100644 --- a/dbt/include/postgres/macros/relations/materialized_view/create.sql +++ b/dbt/include/postgres/macros/relations/materialized_view/create.sql @@ -2,7 +2,7 @@ create materialized view if not exists {{ relation }} as {{ sql }}; {% for _index_dict in config.get('indexes', []) -%} - {{- get_create_index_sql(relation, _index_dict) -}} + {{- get_create_index_sql(relation, _index_dict) -}}{{ ';' if not loop.last else "" }} {%- endfor -%} {% endmacro %} diff --git a/tests/functional/test_multiple_indexes.py b/tests/functional/test_multiple_indexes.py new file mode 100644 index 000000000..1d30a6d48 --- /dev/null +++ b/tests/functional/test_multiple_indexes.py @@ -0,0 +1,27 @@ +import pytest + +from tests.functional.utils import run_dbt + + +REF_MULTIPLE_INDEX_MODEL = """ +{{ + config( + materialized="materialized_view", + indexes=[ + {"columns": ["foo"], "type": "btree"}, + {"columns": ["bar"], "type": "btree"}, + ], + ) +}} + +SELECT 1 AS foo, 2 AS bar +""" + + +class TestUnrestrictedPackageAccess: + @pytest.fixture(scope="class") + def models(self): + return {"index_test.sql": REF_MULTIPLE_INDEX_MODEL} + + def test_unrestricted_protected_ref(self, project): + run_dbt() From e308e2a682110c08ee3593e0a8e964fba61e3bc7 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Mon, 20 May 2024 18:35:36 -0400 Subject: [PATCH 073/114] Add docker release to the full release process for final releases (#51) Co-authored-by: Emily Rockman <emily.rockman@dbtlabs.com> Co-authored-by: Emily Rockman <ebuschang@gmail.com> --- .github/dependabot.yml | 5 + .github/workflows/release.yml | 115 ++++-- .github/workflows/release_prep_hatch.yml | 467 +++++++++++++++++++++++ docker/Dockerfile | 37 ++ docker/README.md | 58 +++ docker/dev.Dockerfile | 54 +++ pyproject.toml | 6 + 7 files changed, 710 insertions(+), 32 deletions(-) create mode 100644 .github/workflows/release_prep_hatch.yml create mode 100644 docker/Dockerfile create mode 100644 docker/README.md create mode 100644 docker/dev.Dockerfile diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 4673f47cf..ae2be43aa 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -10,3 +10,8 @@ updates: schedule: interval: "weekly" rebase-strategy: "disabled" + - package-ecosystem: "docker" + directory: "/docker" + schedule: + interval: "weekly" + rebase-strategy: "disabled" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 27fb9f4f3..1139380a8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,60 +3,111 @@ name: Release on: workflow_dispatch: inputs: - deploy-to: - type: choice - description: Choose where to publish (test/prod) - options: - - prod - - test - default: prod - ref: - description: "The ref (sha or branch name) to use" + branch: + description: "The branch to release from" type: string default: "main" + version: + description: "The version to release" required: true + type: string + deploy-to: + description: "Deploy to test or prod" + type: environment + default: prod + only_docker: + description: "Only release Docker image, skip GitHub & PyPI" + type: boolean + default: false -permissions: read-all +permissions: + contents: write # this is the permission that allows creating a new release # will cancel previous workflows triggered by the same event and for the same ref for PRs or same SHA otherwise concurrency: - group: ${{ github.workflow }}-${{ github.event_name }}-${{ contains(github.event_name, 'pull_request') && github.event.pull_request.head.ref || github.sha }}-${{ inputs.deploy-to }} + group: "${{ github.workflow }}-${{ github.event_name }}-${{ inputs.version }}-${{ inputs.deploy-to }}" cancel-in-progress: true jobs: - release: - name: PyPI - ${{ inputs.deploy-to }} - runs-on: ubuntu-latest - environment: - name: ${{ inputs.deploy-to }} - url: ${{ vars.PYPI_PROJECT_URL }} - permissions: - id-token: write # IMPORTANT: this permission is mandatory for trusted publishing + release-prep: + name: "Release prep: generate changelog, bump version" + uses: dbt-labs/dbt-postgres/.github/workflows/release_prep_hatch.yml@main + with: + branch: ${{ inputs.branch }} + version: ${{ inputs.version }} + deploy-to: ${{ inputs.deploy-to }} + secrets: inherit + build-release: + name: "Build release" + needs: release-prep + runs-on: ubuntu-latest + outputs: + archive-name: ${{ steps.archive.outputs.name }} steps: - - name: Check out repository + - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-prep.outputs.release-branch }}" uses: actions/checkout@v4 with: + ref: ${{ needs.release-prep.outputs.release-branch }} persist-credentials: false - ref: "${{ inputs.ref }}" - - name: Setup `hatch` + - name: "Setup `hatch`" uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main - - name: Inputs - id: release-inputs + - name: "Set archive name" + id: archive run: | - version=$(hatch version) - archive_name=dbt-postgres-$version-${{ inputs.deploy-to }} - echo "archive-name=$archive_name" >> $GITHUB_OUTPUT + archive_name=${{ github.event.repository.name }}-${{ inputs.version }}-${{ inputs.deploy-to }} + echo "name=$archive_name" >> $GITHUB_OUTPUT - - name: Build `dbt-postgres` + - name: "Build ${{ github.event.repository.name }}" uses: dbt-labs/dbt-adapters/.github/actions/build-hatch@main with: - archive-name: ${{ steps.release-inputs.outputs.archive-name }} + archive-name: ${{ steps.archive.outputs.name }} - - name: Publish to PyPI + pypi-release: + name: "PyPI release" + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} + runs-on: ubuntu-latest + needs: build-release + environment: + name: ${{ inputs.deploy-to }} + url: ${{ vars.PYPI_PROJECT_URL }} + permissions: + # this permission is required for trusted publishing + # see https://github.com/marketplace/actions/pypi-publish + id-token: write + steps: + - name: "Publish to PyPI" uses: dbt-labs/dbt-adapters/.github/actions/publish-pypi@main with: - pypi-repository-url: ${{ vars.PYPI_REPOSITORY_URL }} - archive-name: ${{ steps.release-inputs.outputs.archive-name }} + repository-url: ${{ vars.PYPI_REPOSITORY_URL }} + archive-name: ${{ needs.build-release.outputs.archive-name }} + + github-release: + name: "GitHub release" + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} + needs: + - build-release + - release-prep + uses: dbt-labs/dbt-adapters/.github/workflows/github-release.yml@main + with: + sha: ${{ needs.release-prep.outputs.release-sha }} + version_number: ${{ inputs.version }} + changelog_path: ${{ needs.release-prep.outputs.changelog-path }} + test_run: ${{ inputs.deploy-to == 'test' }} + archive_name: ${{ needs.build-release.outputs.archive-name }} + + docker-release: + name: "Docker release" + # We cannot release to docker on a test run because it uses the tag in GitHub as + # what we need to release but draft releases don't actually tag the commit so it + # finds nothing to release + if: ${{ !failure() && !cancelled() && (inputs.deploy-to == 'prod' || inputs.only_docker) }} + needs: github-release # docker relies on the published tag from github-release + permissions: + packages: write # this permission is required for publishing to GHCR + uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main + with: + version_number: ${{ inputs.version }} + test_run: ${{ inputs.deploy-to == 'test' }} diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml new file mode 100644 index 000000000..37129c97d --- /dev/null +++ b/.github/workflows/release_prep_hatch.yml @@ -0,0 +1,467 @@ +# **what?** +# Perform the version bump, generate the changelog and run tests. +# +# Inputs: +# branch: The branch that we will release from +# version: The release version number (i.e. 1.0.0b1, 1.2.3rc2, 1.0.0) +# deploy-to: If we are deploying to prod or test, if test then release from branch +# is-nightly-release: Identifier that this is nightly release +# +# Outputs: +# release-sha: The sha that will actually be released. This can differ from the +# input sha if adding a version bump and/or changelog +# changelog-path: Path to the changelog file (ex .changes/1.2.3-rc1.md) +# +# Branching strategy: +# - During execution workflow execution the temp branch will be generated. +# - For normal runs the temp branch will be removed once changes were merged to target branch; +# - For test runs we will keep temp branch and will use it for release; +# Naming strategy: +# - For normal runs: prep-release/${{ inputs.deploy-to}}/${{ inputs.version }}_$GITHUB_RUN_ID +# - For nightly releases: prep-release/nightly-release/${{ inputs.version }}_$GITHUB_RUN_ID +# +# **why?** +# Reusable and consistent GitHub release process. +# +# **when?** +# Call when ready to kick off a build and release +# +# Validation Checks +# +# 1. Bump the version if it has not been bumped +# 2. Generate the changelog (via changie) if there is no markdown file for this version +name: "Release prep" +run-name: "Release prep: Generate changelog and bump ${{ inputs.package }} to ${{ inputs.version }} for release to ${{ inputs.deploy-to }}" +on: + workflow_call: + inputs: + branch: + description: "The branch to release from" + type: string + default: "main" + version: + description: "The version to release" + required: true + type: string + deploy-to: + description: "Deploy to test or prod" + type: string + default: "prod" + is-nightly-release: + description: "Identify if this is a nightly release" + type: boolean + default: false + outputs: + release-branch: + description: "The branch to be released from" + value: ${{ jobs.release.outputs.branch }} + release-sha: + description: "The SHA to be released" + value: ${{ jobs.release.outputs.sha }} + changelog-path: + description: "The path to the changelog from the repo root for this version, e.g. .changes/1.8.0-b1.md" + value: ${{ jobs.release-inputs.outputs.changelog-path }} + secrets: + FISHTOWN_BOT_PAT: + description: "Token to commit/merge changes into branches" + required: true + IT_TEAM_MEMBERSHIP: + description: "Token that can view org level teams" + required: true + +permissions: + contents: write + +defaults: + run: + shell: bash + +env: + PYTHON_TARGET_VERSION: 3.11 + NOTIFICATION_PREFIX: "[Release Prep]" + +jobs: + release-inputs: + runs-on: ubuntu-latest + outputs: + changelog-path: ${{ steps.changelog.outputs.path }} + changelog-exists: ${{ steps.changelog.outputs.exists }} + base-version: ${{ steps.semver.outputs.base-version }} + pre-release: ${{ steps.semver.outputs.pre-release }} + is-pre-release: ${{ steps.semver.outputs.is-pre-release }} + version-is-current: ${{ steps.version.outputs.is-current }} + + steps: + - name: "[DEBUG] Log inputs" + run: | + # WORKFLOW INPUTS + echo Branch: ${{ inputs.branch }} + echo Release version: ${{ inputs.version }} + echo Deploy to: ${{ inputs.deploy-to }} + echo Nightly release: ${{ inputs.is-nightly-release }} + # ENVIRONMENT VARIABLES + echo Python version: ${{ env.PYTHON_TARGET_VERSION }} + echo Notification prefix: ${{ env.NOTIFICATION_PREFIX }} + + - name: "Checkout ${{ github.event.repository.name }}@${{ inputs.branch }}" + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + + - name: "Setup `hatch`" + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + + - name: "Parse input version" + id: semver + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ inputs.version }} + + - name: "Audit version" + id: version + run: | + is_current=false + current_version=$(hatch version) + if test "$current_version" = "${{ inputs.version }}" + then + is_current=true + fi + echo "is-current=$is_updated" >> $GITHUB_OUTPUT + + - name: "[INFO] Skip version bump" + if: steps.version.outputs.is-current == 'true' + run: | + title="Skip version bump" + message="The version matches the input version ${{ inputs.version }}, skipping version bump" + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + + - name: "Audit changelog" + id: changelog + run: | + path=".changes/" + if [[ ${{ steps.semver.outputs.is-pre-release }} -eq 1 ]] + then + path+="${{ steps.semver.outputs.base-version }}-${{ steps.semver.outputs.pre-release }}.md" + else + path+="${{ steps.semver.outputs.base-version }}.md" + fi + echo "path=$path" >> $GITHUB_OUTPUT + + does_exist=false + if test -f $path + then + does_exist=true + fi + echo "exists=$does_exist">> $GITHUB_OUTPUT + + - name: "[INFO] Skip changelog generation" + if: steps.changelog.outputs.exists == 'true' + run: | + title="Skip changelog generation" + message="A changelog already exists at ${{ steps.changelog.outputs.path }}, skipping generating changelog" + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + + release-branch: + runs-on: ubuntu-latest + needs: release-inputs + if: | + needs.release-inputs.outputs.changelog-exists == 'false' || + needs.release-inputs.outputs.version-is-current == 'false' + outputs: + name: ${{ steps.release-branch.outputs.name }} + + steps: + - name: "Checkout ${{ github.event.repository.name }}@${{ inputs.branch }}" + uses: actions/checkout@v4 + with: + ref: ${{ inputs.branch }} + + - name: "Set release branch" + id: release-branch + run: | + name="prep-release/" + if [[ ${{ inputs.is-nightly-release }} == true ]] + then + name+="nightly-release/" + else + name+="${{ inputs.deploy-to }}/" + fi + name+="${{ inputs.version }}_$GITHUB_RUN_ID" + echo "name=$name" >> $GITHUB_OUTPUT + + - name: "Create release branch ${{ steps.release-branch.outputs.name }}" + run: | + git checkout -b ${{ steps.release-branch.outputs.name }} + git push -u origin ${{ steps.release-branch.outputs.name }} + + - name: "[INFO] Create release branch" + run: | + title="Create release branch" + message="Create release branch: ${{ steps.release-branch.outputs.name }}" + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + + core-team: + if: needs.release-inputs.outputs.changelog-exists == 'false' + uses: dbt-labs/actions/.github/workflows/determine-team-membership.yml@main + with: + github_team: "core-group" + + generate-changelog: + runs-on: ubuntu-latest + if: needs.release-inputs.outputs.changelog-exists == 'false' + # only runs if we need to make changes, determined by not skipping release-branch + needs: + - release-inputs + - release-branch + - core-team + + steps: + - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-branch.outputs.name }}" + uses: actions/checkout@v3 + with: + ref: ${{ needs.release-branch.outputs.name }} + + - name: "Setup `hatch`" + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + + - name: "Install `changie`" + run: | + brew tap miniscruff/changie https://github.com/miniscruff/changie + brew install changie + + - name: "Generate changelog at ${{ needs.release-inputs.outputs.changelog-path }}" + run: | + if [[ ${{ needs.release-inputs.outputs.is-pre-release }} -eq 1 ]] + then + changie batch ${{ needs.release-inputs.outputs.base-version }} \ + --move-dir '${{ needs.release-inputs.outputs.base-version }}' \ + --prerelease ${{ needs.release-inputs.outputs.pre-release }} + elif [[ -d ".changes/${{ needs.release-inputs.outputs.base-version }}" ]] + then + changie batch ${{ needs.release-inputs.outputs.base-version }} \ + --include '${{ needs.release-inputs.outputs.base-version }}' \ + --remove-prereleases + else # releasing a final patch with no pre-releases + changie batch ${{ needs.release-inputs.outputs.base-version }} + fi + changie merge + env: + CHANGIE_CORE_TEAM: ${{ needs.core-team.outputs.team_membership }} + + - name: "Remove trailing whitespace and missing new lines" + # this step will fail on whitespace errors but also correct them + continue-on-error: true + run: hatch run code-quality + + - name: "Commit & push changes" + run: | + git config user.name "$USER" + git config user.email "$EMAIL" + git pull + git add . + git commit -m "$COMMIT_MESSAGE" + git push + env: + USER: "GitHub Build Bot" + EMAIL: "buildbot@fishtownanalytics.com" + COMMIT_MESSAGE: "Generate changelog at ${{ needs.release-inputs.outputs.changelog-path }}" + + - name: "[INFO] Generated changelog at ${{ needs.release-inputs.outputs.changelog-path }}" + run: | + title="Changelog generation" + if [[ -f ${{ needs.release-inputs.outputs.changelog-path }} ]] + then + message="Generated changelog file successfully" + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + else + message="Failed to generate changelog file" + echo "::error title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + exit 1 + fi + + bump-version: + runs-on: ubuntu-latest + if: needs.release-inputs.outputs.version-is-current == 'false' + # only runs if we need to make changes, determined by not skipping release-branch + needs: + - release-inputs + - release-branch + - generate-changelog + + steps: + - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-branch.outputs.name }}" + uses: actions/checkout@v3 + with: + ref: ${{ needs.release-branch.outputs.name }} + + - name: "Setup `hatch`" + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + + - name: "Bump version to ${{ inputs.version }}" + run: hatch version ${{ inputs.version }} + + - name: "Commit & push changes" + run: | + git config user.name "$USER" + git config user.email "$EMAIL" + git pull + git add . + git commit -m "$COMMIT_MESSAGE" + git push + env: + USER: "GitHub Build Bot" + EMAIL: "buildbot@fishtownanalytics.com" + COMMIT_MESSAGE: "Bump version to ${{ inputs.version }}" + + - name: "[INFO] Bumped version to ${{ inputs.version }}" + run: | + title="Version bump" + message="Bumped version to ${{ inputs.version }}" + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + + unit-tests: + runs-on: ubuntu-latest + # only run unit tests if we created a release branch and already bumped the version and generated the changelog + if: | + !failure() && !cancelled() && + needs.release-branch.outputs.name != '' + needs: + - release-branch + - generate-changelog + - bump-version + + steps: + - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-branch.outputs.name }}" + uses: actions/checkout@v4 + with: + ref: ${{ needs.release-branch.outputs.name }} + + - name: "Setup `hatch`" + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + + - name: "Run unit tests" + run: hatch run unit-tests:all + + integration-tests: + runs-on: ubuntu-latest + # only run integration tests if we created a release branch and already bumped the version and generated the changelog + if: | + !failure() && !cancelled() && + needs.release-branch.outputs.name != '' + needs: + - release-branch + - generate-changelog + - bump-version + + services: + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + steps: + - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-branch.outputs.name }}" + uses: actions/checkout@v4 + with: + ref: ${{ needs.release-branch.outputs.name }} + + - name: Setup postgres + run: psql -f ./scripts/setup_test_database.sql + env: + PGHOST: localhost + PGPORT: 5432 + PGUSER: postgres + PGPASSWORD: postgres + PGDATABASE: postgres + + - name: "Set up `hatch`" + uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + + - name: "Run integration tests" + run: hatch run integration-tests:all + env: + POSTGRES_TEST_HOST: localhost + POSTGRES_TEST_PORT: 5432 + POSTGRES_TEST_USER: root + POSTGRES_TEST_PASS: password + POSTGRES_TEST_DATABASE: dbt + POSTGRES_TEST_THREADS: 4 + + merge-release-branch: + runs-on: ubuntu-latest + needs: + - unit-tests + - integration-tests + - release-branch + - release-inputs + if: | + !failure() && !cancelled() && + needs.release-branch.result == 'success' && + inputs.deploy-to == 'prod' + + steps: + - name: "Checkout ${{ github.event.repository.name }}" + uses: actions/checkout@v3 + + - name: "Merge changes into ${{ inputs.branch }}" + uses: everlytic/branch-merge@1.1.5 + with: + source_ref: ${{ needs.release-branch.outputs.name }} + target_branch: ${{ inputs.branch }} + github_token: ${{ secrets.FISHTOWN_BOT_PAT }} + commit_message_template: "[Automated] Merged {source_ref} into target {target_branch} during release process" + + - name: "[INFO] Merge changes into ${{ inputs.branch }}" + run: | + title="Merge changes" + message="Merge ${{ needs.release-branch.outputs.name }} into ${{ inputs.branch }}" + echo "::notice title=${{ env.NOTIFICATION_PREFIX }}: $title::$message" + + release: + runs-on: ubuntu-latest + needs: + - release-branch + - merge-release-branch + if: ${{ !failure() && !cancelled() }} + + # Get the SHA that will be released. + # If the changelog already exists and the version was already current on the input branch, then release from there. + # Otherwise, we generated a changelog and/or did the version bump in this workflow and there is a + # new sha to use from the merge we just did. Grab that here instead. + outputs: + branch: ${{ steps.branch.outputs.name }} + sha: ${{ steps.sha.outputs.sha }} + + steps: + - name: "Set release branch" + id: branch + # If a release branch was created and not merged, use the release branch + # Otherwise, use the input branch because either nothing was done, or the changes were merged back in + run: | + if [ ${{ needs.release-branch.result == 'success' }} && ${{ needs.merge-release-branch.result == 'skipped' }} ]; then + branch="${{ needs.release-branch.outputs.name }}" + else + branch="${{ inputs.branch }}" + fi + echo "name=$branch" >> $GITHUB_OUTPUT + + - name: "Checkout ${{ github.event.repository.name }}@${{ steps.branch.outputs.name }}" + uses: actions/checkout@v3 + with: + ref: ${{ steps.branch.outputs.name }} + + - name: "Set release SHA" + id: sha + run: echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT + + # if this is a real release and a release branch was created, delete it + - name: "Delete release branch: ${{ needs.branch.outputs.name }}" + if: ${{ inputs.deploy-to == 'prod' && inputs.is-nightly-release == 'false' && needs.release-branch.outputs.name != '' }} + run: git push origin -d ${{ needs.branch.outputs.name }} diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 000000000..7c8dc14ee --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,37 @@ +# this image gets published to GHCR for production use +ARG py_version=3.11.2 + +FROM python:$py_version-slim-bullseye as base + +RUN apt-get update \ + && apt-get dist-upgrade -y \ + && apt-get install -y --no-install-recommends \ + build-essential=12.9 \ + ca-certificates=20210119 \ + git=1:2.30.2-1+deb11u2 \ + libpq-dev=13.14-0+deb11u1 \ + make=4.3-4.1 \ + openssh-client=1:8.4p1-5+deb11u3 \ + software-properties-common=0.96.20.2-2.1 \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* + +ENV PYTHONIOENCODING=utf-8 +ENV LANG=C.UTF-8 + +RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir + + +FROM base as dbt-postgres + +ARG commit_ref=main + +HEALTHCHECK CMD dbt --version || exit 1 + +WORKDIR /usr/app/dbt/ +ENTRYPOINT ["dbt"] + +RUN python -m pip install --no-cache-dir "dbt-postgres @ git+https://github.com/dbt-labs/dbt-postgres@${commit_ref}" diff --git a/docker/README.md b/docker/README.md new file mode 100644 index 000000000..22af3fe93 --- /dev/null +++ b/docker/README.md @@ -0,0 +1,58 @@ +# Docker for dbt +This docker file is suitable for building dbt Docker images locally or using with CI/CD to automate populating a container registry. + + +## Building an image: +This Dockerfile can create images for the following target: `dbt-postgres` + +In order to build a new image, run the following docker command. +```shell +docker build --tag <your_image_name> --target dbt-postgres <path/to/dockerfile> +``` +--- +> **Note:** Docker must be configured to use [BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/) in order for images to build properly! + +--- + +By default the image will be populated with the latest version of `dbt-postgres` on `main`. +If you need to use a different version you can specify it by git ref using the `--build-arg` flag: +```shell +docker build --tag <your_image_name> \ + --target dbt-postgres \ + --build-arg commit_ref=<commit_ref> \ + <path/to/dockerfile> +``` + +### Examples: +To build an image named "my-dbt" that supports Snowflake using the latest releases: +```shell +cd dbt-core/docker +docker build --tag my-dbt --target dbt-postgres . +``` + +To build an image named "my-other-dbt" that supports Snowflake using the adapter version 1.0.0b1: +```shell +cd dbt-core/docker +docker build \ + --tag my-other-dbt \ + --target dbt-postgres \ + --build-arg commit_ref=v1.0.0b1 \ + . +``` + +## Running an image in a container: +The `ENTRYPOINT` for this Dockerfile is the command `dbt` so you can bind-mount your project to `/usr/app` and use dbt as normal: +```shell +docker run \ + --network=host \ + --mount type=bind,source=path/to/project,target=/usr/app \ + --mount type=bind,source=path/to/profiles.yml,target=/root/.dbt/profiles.yml \ + my-dbt \ + ls +``` +--- +**Notes:** +* Bind-mount sources _must_ be an absolute path +* You may need to make adjustments to the docker networking setting depending on the specifics of your data warehouse/database host. + +--- diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile new file mode 100644 index 000000000..a7d2eca35 --- /dev/null +++ b/docker/dev.Dockerfile @@ -0,0 +1,54 @@ +# this image does not get published, it is intended for local development only, see `Makefile` for usage +FROM ubuntu:22.04 as base + +# prevent python installation from asking for time zone region +ARG DEBIAN_FRONTEND=noninteractive + +# add python repository +RUN apt-get update \ + && apt-get install -y software-properties-common=0.99.22.9 \ + && add-apt-repository -y ppa:deadsnakes/ppa \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* + +# install python +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + build-essential=12.9ubuntu3 \ + git-all=1:2.34.1-1ubuntu1.10 \ + libpq-dev=14.11-0ubuntu0.22.04.1 \ + python3.8=3.8.19-1+jammy1 \ + python3.8-dev=3.8.19-1+jammy1 \ + python3.8-distutils=3.8.19-1+jammy1 \ + python3.8-venv=3.8.19-1+jammy1 \ + python3-pip=22.0.2+dfsg-1ubuntu0.4 \ + python3-wheel=0.37.1-2ubuntu0.22.04.1 \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* + +# update the default system interpreter to the newly installed version +RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1 + +# install python dependencies +RUN python3 -m pip install --upgrade --no-cache-dir "hatch==1.9.1" + + +FROM base as dbt-postgres-dev + +HEALTHCHECK CMD python3 --version || exit 1 + +# send stdout/stderr to terminal +ENV PYTHONUNBUFFERED=1 + +# setup mount for local code +WORKDIR /opt/code +VOLUME /opt/code + +# create a virtual environment +RUN python3 -m venv /opt/venv diff --git a/pyproject.toml b/pyproject.toml index 0e93423cb..a99829d99 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -65,6 +65,12 @@ dependencies = [ [tool.hatch.envs.default.scripts] dev = "pre-commit install" code-quality = "pre-commit run --all-files" +docker-dev = [ + "echo Does not support integration testing, only development and unit testing. See issue https://github.com/dbt-labs/dbt-postgres/issues/99", + "docker build -f docker/dev.Dockerfile -t dbt-postgres-dev .", + "docker run --rm -it --name dbt-postgres-dev -v $(pwd):/opt/code dbt-postgres-dev", +] +docker-prod = "docker build -f docker/Dockerfile -t dbt-postgres ." [tool.hatch.envs.unit-tests] dependencies = [ From 86349f04dc585f24f1031011b44ed852aed94e9b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 May 2024 01:00:55 -0400 Subject: [PATCH 074/114] Bump ubuntu from 22.04 to 24.04 in /docker (#100) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- docker/dev.Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile index a7d2eca35..e137ff846 100644 --- a/docker/dev.Dockerfile +++ b/docker/dev.Dockerfile @@ -1,5 +1,5 @@ # this image does not get published, it is intended for local development only, see `Makefile` for usage -FROM ubuntu:22.04 as base +FROM ubuntu:24.04 as base # prevent python installation from asking for time zone region ARG DEBIAN_FRONTEND=noninteractive From 0717373f7caa064e4d9be3b08d9693e647319db3 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 23 May 2024 15:08:16 -0400 Subject: [PATCH 075/114] Fix release workflow (#106) --- .github/workflows/release_prep_hatch.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index 37129c97d..8e9ded04a 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -126,7 +126,7 @@ jobs: then is_current=true fi - echo "is-current=$is_updated" >> $GITHUB_OUTPUT + echo "is-current=$is_current" >> $GITHUB_OUTPUT - name: "[INFO] Skip version bump" if: steps.version.outputs.is-current == 'true' @@ -202,9 +202,11 @@ jobs: core-team: if: needs.release-inputs.outputs.changelog-exists == 'false' + needs: release-inputs uses: dbt-labs/actions/.github/workflows/determine-team-membership.yml@main with: github_team: "core-group" + secrets: inherit generate-changelog: runs-on: ubuntu-latest @@ -445,7 +447,7 @@ jobs: # If a release branch was created and not merged, use the release branch # Otherwise, use the input branch because either nothing was done, or the changes were merged back in run: | - if [ ${{ needs.release-branch.result == 'success' }} && ${{ needs.merge-release-branch.result == 'skipped' }} ]; then + if [[ ${{ needs.release-branch.result == 'success' }} && ${{ needs.merge-release-branch.result == 'skipped' }} ]]; then branch="${{ needs.release-branch.outputs.name }}" else branch="${{ inputs.branch }}" From 96dd860b7b857c93736cba00d71280a5ec2cb37c Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 23 May 2024 17:54:31 -0400 Subject: [PATCH 076/114] Add a changelog entry check for pull requests (#108) --- .github/workflows/changelog-entry-check.yml | 29 +++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 .github/workflows/changelog-entry-check.yml diff --git a/.github/workflows/changelog-entry-check.yml b/.github/workflows/changelog-entry-check.yml new file mode 100644 index 000000000..889c09952 --- /dev/null +++ b/.github/workflows/changelog-entry-check.yml @@ -0,0 +1,29 @@ +name: Changelog entry check + +on: + pull_request: + types: + - opened + - reopened + - labeled + - unlabeled + - synchronize + +defaults: + run: + shell: bash + +permissions: + contents: read + pull-requests: write + +jobs: + changelog-entry-check: + uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main + with: + changelog_comment: >- + Thank you for your pull request! We could not find a changelog entry for this change. + For details on how to document a change, see the + [dbt-postgres contributing guide](https://github.com/dbt-labs/dbt-postgres/blob/main/CONTRIBUTING.md). + skip_label: "Skip Changelog" + secrets: inherit From 007e8b8221b95e35c337de4a21300e235af68790 Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 11 Jun 2024 10:36:32 -0700 Subject: [PATCH 077/114] delete more Core only Tests (#114) --- .../data/results/v4/run_results.json | 1 - .../data/results/v5/run_results.json | 1 - .../data/results/v6/run_results.json | 1 - .../artifacts/data/state/v1/manifest.json | 1 - .../artifacts/data/state/v10/manifest.json | 1 - .../artifacts/data/state/v11/manifest.json | 1 - .../artifacts/data/state/v12/manifest.json | 1 - .../artifacts/data/state/v2/manifest.json | 1 - .../artifacts/data/state/v3/manifest.json | 1 - .../artifacts/data/state/v4/manifest.json | 1 - .../artifacts/data/state/v5/manifest.json | 1 - .../artifacts/data/state/v6/manifest.json | 1 - .../artifacts/data/state/v7/manifest.json | 1 - .../artifacts/data/state/v8/manifest.json | 1 - .../artifacts/data/state/v9/manifest.json | 1 - .../functional/artifacts/expected_manifest.py | 1938 ----------------- .../artifacts/expected_run_results.py | 243 --- .../artifacts/test_artifact_fields.py | 52 - tests/functional/artifacts/test_artifacts.py | 621 ------ .../artifacts/test_docs_generate_defer.py | 43 - tests/functional/artifacts/test_override.py | 38 - .../artifacts/test_previous_version_state.py | 454 ---- .../functional/artifacts/test_run_results.py | 74 - tests/functional/build_command/fixtures.py | 302 --- tests/functional/build_command/test_build.py | 211 -- tests/functional/cli/test_cli_exit_codes.py | 37 - .../cli/test_env_var_deprecations.py | 60 - tests/functional/cli/test_error_handling.py | 20 - tests/functional/cli/test_multioption.py | 143 -- tests/functional/cli/test_resolvers.py | 36 - tests/functional/configs/fixtures.py | 201 -- tests/functional/configs/test_configs.py | 141 -- .../configs/test_configs_in_schema_files.py | 256 --- .../configs/test_contract_configs.py | 527 ----- .../test_custom_node_colors_configs.py | 341 --- .../configs/test_disabled_configs.py | 90 - .../functional/configs/test_disabled_model.py | 390 ---- tests/functional/configs/test_dupe_paths.py | 74 - tests/functional/configs/test_get_default.py | 26 - .../functional/configs/test_grant_configs.py | 155 -- tests/functional/configs/test_indiv_tests.py | 58 - .../functional/configs/test_unused_configs.py | 52 - .../duplicates/test_duplicate_analysis.py | 32 - .../duplicates/test_duplicate_exposure.py | 30 - .../duplicates/test_duplicate_macro.py | 71 - .../duplicates/test_duplicate_metric.py | 40 - .../duplicates/test_duplicate_model.py | 263 --- .../duplicates/test_duplicate_resource.py | 33 - .../duplicates/test_duplicate_source.py | 26 - .../test_missing_strategy_snapshot.py | 51 - 50 files changed, 7144 deletions(-) delete mode 100644 tests/functional/artifacts/data/results/v4/run_results.json delete mode 100644 tests/functional/artifacts/data/results/v5/run_results.json delete mode 100644 tests/functional/artifacts/data/results/v6/run_results.json delete mode 100644 tests/functional/artifacts/data/state/v1/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v10/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v11/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v12/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v2/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v3/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v4/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v5/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v6/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v7/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v8/manifest.json delete mode 100644 tests/functional/artifacts/data/state/v9/manifest.json delete mode 100644 tests/functional/artifacts/expected_manifest.py delete mode 100644 tests/functional/artifacts/expected_run_results.py delete mode 100644 tests/functional/artifacts/test_artifact_fields.py delete mode 100644 tests/functional/artifacts/test_artifacts.py delete mode 100644 tests/functional/artifacts/test_docs_generate_defer.py delete mode 100644 tests/functional/artifacts/test_override.py delete mode 100644 tests/functional/artifacts/test_previous_version_state.py delete mode 100644 tests/functional/artifacts/test_run_results.py delete mode 100644 tests/functional/build_command/fixtures.py delete mode 100644 tests/functional/build_command/test_build.py delete mode 100644 tests/functional/cli/test_cli_exit_codes.py delete mode 100644 tests/functional/cli/test_env_var_deprecations.py delete mode 100644 tests/functional/cli/test_error_handling.py delete mode 100644 tests/functional/cli/test_multioption.py delete mode 100644 tests/functional/cli/test_resolvers.py delete mode 100644 tests/functional/configs/fixtures.py delete mode 100644 tests/functional/configs/test_configs.py delete mode 100644 tests/functional/configs/test_configs_in_schema_files.py delete mode 100644 tests/functional/configs/test_contract_configs.py delete mode 100644 tests/functional/configs/test_custom_node_colors_configs.py delete mode 100644 tests/functional/configs/test_disabled_configs.py delete mode 100644 tests/functional/configs/test_disabled_model.py delete mode 100644 tests/functional/configs/test_dupe_paths.py delete mode 100644 tests/functional/configs/test_get_default.py delete mode 100644 tests/functional/configs/test_grant_configs.py delete mode 100644 tests/functional/configs/test_indiv_tests.py delete mode 100644 tests/functional/configs/test_unused_configs.py delete mode 100644 tests/functional/duplicates/test_duplicate_analysis.py delete mode 100644 tests/functional/duplicates/test_duplicate_exposure.py delete mode 100644 tests/functional/duplicates/test_duplicate_macro.py delete mode 100644 tests/functional/duplicates/test_duplicate_metric.py delete mode 100644 tests/functional/duplicates/test_duplicate_model.py delete mode 100644 tests/functional/duplicates/test_duplicate_resource.py delete mode 100644 tests/functional/duplicates/test_duplicate_source.py delete mode 100644 tests/functional/simple_snapshot/test_missing_strategy_snapshot.py diff --git a/tests/functional/artifacts/data/results/v4/run_results.json b/tests/functional/artifacts/data/results/v4/run_results.json deleted file mode 100644 index 0767eb8e8..000000000 --- a/tests/functional/artifacts/data/results/v4/run_results.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v4.json", "dbt_version": "1.6.7", "generated_at": "2023-11-06T20:40:37.557735Z", "invocation_id": "42f85a60-4f7b-4cc1-a197-62687104fecc", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:40:37.486980Z", "completed_at": "2023-11-06T20:40:37.488837Z"}, {"name": "execute", "started_at": "2023-11-06T20:40:37.490290Z", "completed_at": "2023-11-06T20:40:37.539787Z"}], "thread_id": "Thread-9 (worker)", "execution_time": 0.0566411018371582, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:40:37.485334Z", "completed_at": "2023-11-06T20:40:37.489266Z"}, {"name": "execute", "started_at": "2023-11-06T20:40:37.494545Z", "completed_at": "2023-11-06T20:40:37.542811Z"}], "thread_id": "Thread-8 (worker)", "execution_time": 0.060118675231933594, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine"}], "elapsed_time": 0.18144583702087402, "args": {"defer": false, "indirect_selection": "eager", "select": [], "log_level_file": "debug", "use_colors": true, "cache_selected_only": false, "strict_mode": false, "use_colors_file": true, "partial_parse_file_diff": true, "static_parser": true, "write_json": true, "warn_error_options": {"include": [], "exclude": []}, "print": true, "log_level": "info", "profiles_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-16/profile0", "log_path": "/Users/jerco/dev/product/dbt-core/logs/test16993032361853467608", "partial_parse": true, "quiet": false, "log_format_file": "debug", "version_check": true, "send_anonymous_usage_stats": false, "project_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-16/project0", "log_format": "default", "enable_legacy_logger": false, "exclude": [], "populate_cache": true, "log_file_max_bytes": 10485760, "macro_debugging": false, "printer_width": 80, "invocation_command": "dbt tests/functional/artifacts/test_previous_version_state.py::TestPreviousVersionState", "which": "run", "favor_state": false, "introspect": true, "vars": {}}} diff --git a/tests/functional/artifacts/data/results/v5/run_results.json b/tests/functional/artifacts/data/results/v5/run_results.json deleted file mode 100644 index 63a7a58ea..000000000 --- a/tests/functional/artifacts/data/results/v5/run_results.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v5.json", "dbt_version": "1.8.0a1", "generated_at": "2023-11-06T20:43:08.231028Z", "invocation_id": "a9238a29-6764-47f0-ba7d-f7d61ae5e6c0", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:43:08.146847Z", "completed_at": "2023-11-06T20:43:08.149862Z"}, {"name": "execute", "started_at": "2023-11-06T20:43:08.151676Z", "completed_at": "2023-11-06T20:43:08.206208Z"}], "thread_id": "Thread-9 (worker)", "execution_time": 0.06433510780334473, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model", "compiled": true, "compiled_code": "select 1 as id", "relation_name": "\"dbt\".\"test16993033859513627134_test_previous_version_state\".\"my_model\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:43:08.144982Z", "completed_at": "2023-11-06T20:43:08.150320Z"}, {"name": "execute", "started_at": "2023-11-06T20:43:08.155222Z", "completed_at": "2023-11-06T20:43:08.209881Z"}], "thread_id": "Thread-8 (worker)", "execution_time": 0.06822013854980469, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine", "compiled": true, "compiled_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "relation_name": "\"dbt\".\"test16993033859513627134_test_previous_version_state\".\"metricflow_time_spine\""}], "elapsed_time": 0.18284392356872559, "args": {"send_anonymous_usage_stats": false, "profiles_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-19/profile0", "static_parser": true, "partial_parse_file_diff": true, "printer_width": 80, "log_level_file": "debug", "project_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-19/project0", "log_format": "default", "strict_mode": false, "macro_debugging": false, "indirect_selection": "eager", "version_check": true, "use_colors_file": true, "select": [], "log_file_max_bytes": 10485760, "warn_error_options": {"include": [], "exclude": []}, "log_format_file": "debug", "invocation_command": "dbt tests/functional/artifacts/test_previous_version_state.py::TestPreviousVersionState", "write_json": true, "log_level": "info", "cache_selected_only": false, "quiet": false, "favor_state": false, "enable_legacy_logger": false, "log_path": "/Users/jerco/dev/product/dbt-core/logs/test16993033859513627134", "which": "run", "partial_parse": true, "introspect": true, "show_resource_report": false, "exclude": [], "populate_cache": true, "vars": {}, "use_colors": true, "defer": false, "print": true}} diff --git a/tests/functional/artifacts/data/results/v6/run_results.json b/tests/functional/artifacts/data/results/v6/run_results.json deleted file mode 100644 index f78176c93..000000000 --- a/tests/functional/artifacts/data/results/v6/run_results.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v6.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-06T18:53:19.641690Z", "invocation_id": "ad4ef714-e6c6-425e-b7c8-c1c4369df4ea", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-12-06T18:53:19.554953Z", "completed_at": "2023-12-06T18:53:19.559711Z"}, {"name": "execute", "started_at": "2023-12-06T18:53:19.564874Z", "completed_at": "2023-12-06T18:53:19.620151Z"}], "thread_id": "Thread-8", "execution_time": 0.06995701789855957, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine", "compiled": true, "compiled_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "relation_name": "\"dbt\".\"test17018887966812726006_test_previous_version_state\".\"metricflow_time_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-12-06T18:53:19.557019Z", "completed_at": "2023-12-06T18:53:19.559247Z"}, {"name": "execute", "started_at": "2023-12-06T18:53:19.561000Z", "completed_at": "2023-12-06T18:53:19.622080Z"}], "thread_id": "Thread-9", "execution_time": 0.07100677490234375, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model", "compiled": true, "compiled_code": "select 1 as id", "relation_name": "\"dbt\".\"test17018887966812726006_test_previous_version_state\".\"my_model\""}], "elapsed_time": 0.13903093338012695, "args": {"print": true, "log_level_file": "debug", "quiet": false, "warn_error_options": {"include": [], "exclude": []}, "write_json": true, "invocation_command": "dbt --cov=core --cov-append --cov-report=xml tests/functional/artifacts/test_previous_version_state.py", "log_level": "info", "select": [], "project_dir": "/private/var/folders/67/r0f0jlj54h95zl3fhmb217jh0000gp/T/pytest-of-william/pytest-68/project0", "static_parser": true, "log_file_max_bytes": 10485760, "empty": false, "introspect": true, "log_format_file": "debug", "vars": {}, "strict_mode": false, "indirect_selection": "eager", "show_resource_report": false, "favor_state": false, "version_check": true, "cache_selected_only": false, "enable_legacy_logger": false, "partial_parse": true, "profiles_dir": "/private/var/folders/67/r0f0jlj54h95zl3fhmb217jh0000gp/T/pytest-of-william/pytest-68/profile0", "defer": false, "printer_width": 80, "send_anonymous_usage_stats": false, "use_colors": true, "log_path": "/Users/william/git/dbt-core/logs/test17018887966812726006", "partial_parse_file_diff": true, "populate_cache": true, "macro_debugging": false, "use_colors_file": true, "log_format": "default", "which": "run", "exclude": []}} diff --git a/tests/functional/artifacts/data/state/v1/manifest.json b/tests/functional/artifacts/data/state/v1/manifest.json deleted file mode 100644 index 2811b7456..000000000 --- a/tests/functional/artifacts/data/state/v1/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v1.json", "dbt_version": "0.19.2", "generated_at": "2022-06-08T05:12:57.550908Z", "invocation_id": "57566e21-fbd4-4848-87ca-d05ddbd9012e", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "materialized": "view", "persist_docs": {}, "vars": {}, "quoting": {}, "column_types": {}, "alias": null, "schema": null, "database": null, "tags": [], "full_refresh": null, "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}}}, "sources": {}, "macros": {"macro.test.drop_relation": {"unique_id": "macro.test.drop_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(dbt_labs_materialized_views.drop_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.postgres__list_relations_without_caching": {"unique_id": "macro.test.postgres__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.postgres__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.postgres_get_relations": {"unique_id": "macro.test.postgres_get_relations", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(dbt_labs_materialized_views.postgres_get_relations()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres_get_relations"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.redshift__list_relations_without_caching": {"unique_id": "macro.test.redshift__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "redshift__list_relations_without_caching", "macro_sql": "{% macro redshift__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.redshift__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.redshift__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.test.load_relation": {"unique_id": "macro.test.load_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(dbt_labs_materialized_views.redshift_load_relation_or_mv(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence = 'p' -- [p]ermanent table. Other values are [u]nlogged table, [t]emporary table\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% for column_name in column_dict %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.column_list": {"unique_id": "macro.dbt.column_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list", "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.column_list_for_create_table": {"unique_id": "macro.dbt.column_list_for_create_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list_for_create_table", "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n ;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/fishtown-analytics/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = snapshot_string_as_time(now) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.basic_load_csv_rows": {"unique_id": "macro.dbt.basic_load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "basic_load_csv_rows", "macro_sql": "{% macro basic_load_csv_rows(model, batch_size, agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_seed_column_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n {{ return(basic_load_csv_rows(model, 10000, agate_table) )}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.basic_load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.incremental_upsert": {"unique_id": "macro.dbt.incremental_upsert", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/helpers.sql", "original_file_path": "macros/materializations/incremental/helpers.sql", "name": "incremental_upsert", "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/incremental.sql", "original_file_path": "macros/materializations/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(this) %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif existing_relation.is_view or should_full_refresh() %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set backup_identifier = existing_relation.identifier ~ \"__dbt_backup\" %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n {% do adapter.drop_relation(backup_relation) %}\n\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.should_full_refresh", "macro.dbt.run_query", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column in dest_columns -%}\n {{ adapter.quote(column.name) }} = DBT_INTERNAL_SOURCE.{{ adapter.quote(column.name) }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.common_get_delete_insert_merge_sql": {"unique_id": "macro.dbt.common_get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "common_get_delete_insert_merge_sql", "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n );\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.common_get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/table/table.sql", "original_file_path": "macros/materializations/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/view.sql", "original_file_path": "macros/materializations/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch(\"handle_existing_table\", packages=['dbt'])(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view(run_outside_transaction_hooks=True) %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {% if run_outside_transaction_hooks %}\n -- no transactions on BigQuery\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n {% endif %}\n\n -- `BEGIN` happens here on Snowflake\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if run_outside_transaction_hooks %}\n -- No transactions on BigQuery\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n {% endif %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/query.sql", "original_file_path": "macros/etc/query.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/is_incremental.sql", "original_file_path": "macros/etc/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, to, field) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('from')) %}\n\n\nselect count(*) as validation_errors\nfrom (\n select {{ column_name }} as id from {{ model }}\n) as child\nleft join (\n select {{ field }} as id from {{ to }}\n) as parent on parent.id = child.id\nwhere child.id is not null\n and parent.id is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "test_relationships", "macro_sql": "{% macro test_relationships(model, to, field) %}\n {% set macro = adapter.dispatch('test_relationships') %}\n {{ macro(model, to, field, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n\nselect count(*) as validation_errors\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "test_not_null", "macro_sql": "{% macro test_not_null(model) %}\n {% set macro = adapter.dispatch('test_not_null') %}\n {{ macro(model, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n\nselect count(*) as validation_errors\nfrom (\n\n select\n {{ column_name }}\n\n from {{ model }}\n where {{ column_name }} is not null\n group by {{ column_name }}\n having count(*) > 1\n\n) validation_errors\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "test_unique", "macro_sql": "{% macro test_unique(model) %}\n {% set macro = adapter.dispatch('test_unique') %}\n {{ macro(model, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, values) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('field')) %}\n{% set quote_values = kwargs.get('quote', True) %}\n\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field not in (\n {% for value in values -%}\n {% if quote_values -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n)\n\nselect count(*) as validation_errors\nfrom validation_errors\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "test_accepted_values", "macro_sql": "{% macro test_accepted_values(model, values) %}\n {% set macro = adapter.dispatch('test_accepted_values') %}\n {{ macro(model, values, **kwargs) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": []}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support."}}, "exposures": {}, "selectors": {}, "disabled": [], "parent_map": {"model.test.my_model": []}, "child_map": {"model.test.my_model": []}} diff --git a/tests/functional/artifacts/data/state/v10/manifest.json b/tests/functional/artifacts/data/state/v10/manifest.json deleted file mode 100644 index 9c73cf337..000000000 --- a/tests/functional/artifacts/data/state/v10/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", "dbt_version": "1.6.6", "generated_at": "2023-10-11T20:49:37.080431Z", "invocation_id": "e2f630c5-769a-47a2-89ce-294a00e14e1a", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.543413, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.456355, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16970573770617803847_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state"}, "created_at": 1697057377.471309, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.492032, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1697057377.508335, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.525708, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/79/5290gpvn3lx5jdryk4844rm80000gn/T/pytest-of-quigleymalcolm/pytest-271/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.552852, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.553834, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1697057377.594166}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.099874, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1000938, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1002848, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1005828, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.10079, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1009028, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.101016, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.101125, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1022131, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1028638, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1119502, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.112461, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.112787, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.113112, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.113596, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.114043, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.114221, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1145759, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1149912, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1158679, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1160781, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.116409, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.116695, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117132, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117368, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117985, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118195, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118315, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118505, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118647, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.120726, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121023, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql": {"name": "postgres__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- get_create_materialized_view_as_sql(intermediate_relation, sql) -}}\n\n {% if existing_relation is not none %}\n alter materialized view {{ existing_relation }} rename to {{ backup_relation.include(database=False, schema=False) }};\n {% endif %}\n\n alter materialized view {{ intermediate_relation }} rename to {{ relation.include(database=False, schema=False) }};\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121473, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121773, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1218822, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.122449, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.122707, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.123094, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.123816, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.124038, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.124678, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.127991, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.128149, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1286578, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.129076, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130199, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130403, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130552, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1307, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1308448, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.131226, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.131537, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1318662, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13231, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.132591, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.136333, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13651, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1367402, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1374788, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13765, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.137829, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1392791, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.140613, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.144871, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.14516, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145334, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1454248, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1455739, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145694, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145904, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.146819, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1470149, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.147279, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.147718, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.154073, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156199, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156668, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156987, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.157378, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.157763, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.159425, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1599932, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.160686, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1609302, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1617038, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.168492, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.170215, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.170489, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.171521, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.171801, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.172469, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.173121, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.17403, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174277, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174471, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174779, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174974, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.17528, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175472, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175742, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175937, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.176092, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.176376, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1815941, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1874628, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.188724, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.189962, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1908412, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.196028, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1964319, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.196692, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_materialized_view_as_sql(target_relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1980631, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.198303, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.198978, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"<change_category>\": [{\"action\": \"<name>\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.19941, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.19956, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.200053, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.200245, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2005591, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2006972, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_as_sql": {"name": "get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.get_replace_materialized_view_as_sql", "macro_sql": "{% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- log('Applying REPLACE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2011251, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_as_sql": {"name": "default__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_as_sql", "macro_sql": "{% macro default__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2013052, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.201658, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.201805, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202045, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202231, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202538, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202677, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2059531, "supported_languages": ["sql"]}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2070122, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2071402, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.207693, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2078662, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2080052, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.209325, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.209718, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.210073, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2131069, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.214011, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2142022, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.214653, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2153769, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2158551, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2160509, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.216245, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219257, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219564, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219792, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2213418, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.221772, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2219388, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.222131, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.222568, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.225961, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.231348, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.232269, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.232512, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233001, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2331991, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233336, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233479, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233599, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233762, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233883, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2343712, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2345622, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.236012, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2364511, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.236854, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237386, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237652, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237949, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238351, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238611, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238965, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2392662, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.239514, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2400918, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.241607, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.242203, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2425091, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2444658, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.245748, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.246516, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2467651, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2469969, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247075, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2474089, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2475772, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247827, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247957, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248211, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248318, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2486641, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248833, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2490602, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2491379, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249404, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2495492, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249847, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249984, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2506409, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251061, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251404, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251571, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.25186, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252075, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252336, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252498, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2527459, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252909, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.25316, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.253269, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2535648, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2537038, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.253951, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.254057, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.254996, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2551548, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2553222, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255476, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255645, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2558029, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255967, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256154, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256317, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256474, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256723, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2568839, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257049, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2572002, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2574809, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257617, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2578712, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257977, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258348, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258624, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258776, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259321, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259488, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259715, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259997, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.260129, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.26051, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.260766, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2610502, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261183, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261564, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261753, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261918, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262178, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2626739, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262827, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262975, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263083, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263253, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.26333, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2635038, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263676, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264548, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264692, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264854, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265263, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265459, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265602, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265763, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265894, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.268777, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.268948, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269174, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269548, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269803, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270133, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270325, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2704918, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270706, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270998, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.271255, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.271829, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.27207, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.27222, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.272419, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.272854, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.273275, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2735639, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.273793, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.275553, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2756748, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2758532, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.275969, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276321, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276514, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2766201, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276926, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277123, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277358, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2775512, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277785, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.278485, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.278682, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2789361, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2791739, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2803478, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2808928, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281089, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281228, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281921, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2820952, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282303, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282475, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282752, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.283254, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2848241, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285086, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2852778, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285444, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285694, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.28595, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2861598, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2864761, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.286675, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2868428, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2877948, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {%- if relation.is_table -%}\n {{- drop_table(relation) -}}\n {%- elif relation.is_view -%}\n {{- drop_view(relation) -}}\n {%- elif relation.is_materialized_view -%}\n {{- drop_materialized_view(relation) -}}\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endif -%}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.drop_table", "macro.dbt.drop_view", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.28826, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288448, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288556, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288737, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288845, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.289025, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.289133, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.291446, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2916129, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2919302, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2921631, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.292375, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.292562, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.293418, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2937758, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2939641, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.294328, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2945652, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.295179, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.295451, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.296254, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.29796, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2981188, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.298965, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.299389, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.299982, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.30047, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3005419, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3010602, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3013, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.301594, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.301873, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1697057377.578206}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1697057377.583621, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1697057377.5840042, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1697057377.585288, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}, "alias": null, "offset_window": null, "offset_to_grain": null}]}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1697057377.5861351, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.4547698, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16970573770617803847_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state", "enabled": false}, "created_at": 1697057377.4774349, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.489575, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.5060952, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.558094, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1697057377.5790222}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1697057377.584552, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.5646772, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/79/5290gpvn3lx5jdryk4844rm80000gn/T/pytest-of-quigleymalcolm/pytest-271/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1697057377.5942788}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test16970573770617803847_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1697057377.5929039, "config": {"enabled": true}, "primary_entity": null}}} diff --git a/tests/functional/artifacts/data/state/v11/manifest.json b/tests/functional/artifacts/data/state/v11/manifest.json deleted file mode 100644 index 2d57234cf..000000000 --- a/tests/functional/artifacts/data/state/v11/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v11.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-05T16:53:23.890718Z", "invocation_id": "6ab55e79-96b3-4825-ad9f-e1d1da5a1ba3", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.698763, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795201.711199, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17017951992510102999_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17017951992510102999_test_previous_version_state"}, "created_at": 1701795201.845236, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.01357, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1701795202.213242, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.463294, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/jk/wc60sy6551568b9mkw_01h9r0000gn/T/pytest-of-emily/pytest-179/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.822232, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1701795202.8247292, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1701795203.1697412}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6199858, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.620511, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6209228, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.621485, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.621939, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.622278, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6225321, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6227798, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.625891, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6267462, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.627949, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.628255, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6437912, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.64499, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.645702, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6464581, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.647518, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.648478, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.64887, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6498601, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.651049, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.653446, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.653924, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.654655, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.655276, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.656255, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6567712, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.658122, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.658636, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.658905, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.659279, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.659605, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.660476, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.661935, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.662252, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.66293, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6632478, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.663639, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.665281, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.666545, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.667615, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6688828, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.669285, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.67079, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.671197, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.671505, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.672849, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.67325, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.673802, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6751292, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.682186, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.682699, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.684223, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.685657, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6880362, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6885052, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.688853, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.689187, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6895208, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.69028, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.69104, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.6918972, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.692802, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.693482, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.700966, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7015522, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.702199, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.70386, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.704243, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.704649, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.707978, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.711052, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.717665, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7189279, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.719371, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.719593, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7199302, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7201998, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.720671, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7225971, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7230332, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.723613, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.724629, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.738543, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7443469, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7464151, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.747135, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.74761, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as actual_or_expected\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as actual_or_expected\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.748797, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.749613, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.750813, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n {%- endfor -%}\n\n {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.get_expected_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.754615, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.7630222, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.763915, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.764492, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.768254, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.769085, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.770625, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.776339, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.782128, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.785681, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.786989, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.788492, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.789021, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.790645, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8009338, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.805187, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.805814, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.808223, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8088698, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.810357, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.811747, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.81349, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8140302, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.814534, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.815238, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8156958, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.816755, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.817258, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8179939, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.818708, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.819102, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8197608, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.830284, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.839777, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.842585, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8455942, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.847949, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8489118, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8492649, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8502321, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8505979, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.858089, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.864974, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.873394, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.875537, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.876073, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.877249, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.877687, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.877996, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.878316, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.878582, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8789499, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8792262, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.880359, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8807812, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.884084, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8857841, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.887074, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8884811, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8898969, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8908532, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.892451, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8935628, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8955379, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8970492, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.897825, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8986602, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.8993979, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9021928, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.906515, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9075258, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.908553, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9093351, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9099782, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9110131, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9116, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.913279, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.914389, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.914904, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.915583, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.916468, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.917393, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.918984, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.920189, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9210382, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9216352, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9222558, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.922518, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.923136, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.923624, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9247959, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.92559, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.926479, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.926847, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.928088, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9285948, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"<change_category>\": [{\"action\": \"<name>\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9293408, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.930051, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.931433, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.932239, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.934562, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9352589, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.93693, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.937391, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.937782, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.940798, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.942014, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.942831, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.943433, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.943681, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9442549, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9447072, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.945371, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.945725, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.948194, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.94883, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9502022, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.952827, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.954375, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.95489, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.955314, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.955926, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.956181, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.957737, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.958209, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.960974, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9614248, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.962048, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9626722, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.963051, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.96443, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.965119, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.966122, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.967995, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9693792, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9701362, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9706838, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.971901, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.974993, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.976295, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.976956, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.98135, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.985949, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.987921, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.988501, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.989037, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9892218, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.990579, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9921122, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.99267, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.9934888, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.994245, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.994626, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.995157, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.995448, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.997557, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795200.999308, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.000303, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.002402, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.003065, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.003328, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0040388, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.004419, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0049028, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.005106, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.00577, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.006273, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0068932, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.007206, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.008418, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0093498, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.010061, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.010436, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.011046, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.011369, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0119638, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.012336, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.012935, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.013579, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0144289, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.014882, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0164359, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.017073, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.018199, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.018629, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.020689, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.021079, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0216, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.021996, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.022377, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.022727, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0231, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0235069, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0238762, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.02422, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.02458, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0250869, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.025496, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.025868, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0265422, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.026857, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0273962, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.027641, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.028342, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0289578, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0293171, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.030658, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.031221, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.032276, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.033396, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.033823, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0352662, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0362182, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.037148, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.037907, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0390232, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.039777, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.040294, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.040722, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.041665, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.042021, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0423522, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.042599, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0429802, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.043158, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.043534, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0439239, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.045585, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0459142, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.046274, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.047541, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.048099, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0486348, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0491462, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.049642, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.053334, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.053748, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0542579, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.054944, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0555499, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.056275, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.056695, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.057075, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.057626, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.059056, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.059603, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.059943, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.060834, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.061783, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.062391, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.062944, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.066784, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.06715, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.067581, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0679018, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.069325, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.069806, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0700648, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.070583, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.071019, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0715342, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.071965, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0724852, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0740328, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0744698, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0752409, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.075805, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.078434, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.079551, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.07999, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0803041, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.081865, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.082284, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.082762, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0831811, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.08413, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.085705, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.090453, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.091082, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.09168, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.092282, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.092702, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.093062, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.093466, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.094017, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.094474, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.095205, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.095628, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0959911, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0963771, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.096725, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.0972311, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.097627, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.102133, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.102572, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.103314, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.103957, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.104427, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.104844, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.107417, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1082602, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.10869, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.10948, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.10999, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1114, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.111994, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.11379, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this) -%}\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.120623, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * FROM dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.122299, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n\n{#-- wrap yaml strings in quotes, apply cast --#}\n{%- for column_name, column_value in row.items() -%}\n{% set row_update = {column_name: column_value} %}\n{%- if column_value is string -%}\n{%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%}\n{%- elif column_value is none -%}\n{%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%}\n{%- else -%}\n{%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%}\n{%- endif -%}\n{%- do row.update(row_update) -%}\n{%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.124015, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1272519, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.127626, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.129449, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1304069, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.131742, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.132842, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.133022, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.1345239, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.135215, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.136147, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1701795201.136801, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1701795203.004519}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1701795203.088031, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1701795203.08911, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1701795203.0918, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}]}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1701795203.093819, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795201.707621, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17017951992510102999_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17017951992510102999_test_previous_version_state", "enabled": false}, "created_at": 1701795201.899797, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17017951992510102999_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.007819, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.207177, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17017951992510102999_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.8354402, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1701795203.007275}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1701795203.090345, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17017951992510102999_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1701795202.8690538, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/jk/wc60sy6551568b9mkw_01h9r0000gn/T/pytest-of-emily/pytest-179/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1701795203.1703029}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17017951992510102999_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17017951992510102999_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1701795203.164725, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} diff --git a/tests/functional/artifacts/data/state/v12/manifest.json b/tests/functional/artifacts/data/state/v12/manifest.json deleted file mode 100644 index d0ec33487..000000000 --- a/tests/functional/artifacts/data/state/v12/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-13T17:51:37.252335Z", "invocation_id": "ea31128b-c8be-4ccf-806a-112748d83b11", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.4497569, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.062557, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17024898921033785545_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state"}, "created_at": 1702489893.131624, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.215913, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1702489893.278812, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.396907, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/1j/l_jt_2w16t5dnplmd2n0cr880000gq/T/pytest-of-gerda/pytest-106/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.496192, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1702489893.4976692, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1702489893.6685581}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.459133, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.459455, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.45973, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460128, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4604428, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460591, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.460742, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4608908, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.462921, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.463424, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4643211, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.464503, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4763231, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.477042, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4774752, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.477914, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.478566, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.479178, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.479415, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4798899, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4811032, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4822998, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.482568, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.483017, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.483407, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.484009, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.484319, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.48519, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4854872, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.485644, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.485892, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.486088, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.486612, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.487571, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.487763, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4881668, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.488352, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.488592, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.489763, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4905322, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.490937, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4914439, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.491634, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4926, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4928472, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4930282, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4938009, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4940412, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4943411, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4951968, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.4995358, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.499748, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.500465, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.501039, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5025449, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.502815, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503018, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503212, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503405, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.503903, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.504405, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.504831, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.505419, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.505802, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.510586, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.510832, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5111418, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5121439, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.512374, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.512612, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5145621, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.516487, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.521842, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522252, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5224829, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522608, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522808, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.522967, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.523251, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.524491, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.524776, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.525128, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5257301, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.534119, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.537909, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5393128, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.539742, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5400288, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as actual_or_expected\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected AS (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as actual_or_expected\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5407722, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.541282, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.541804, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n {%- endfor -%}\n\n {% set unit_test_sql = get_unit_test_sql(sql, get_expected_sql(expected_rows, column_name_to_data_types), tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.get_expected_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.543893, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.550401, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5509398, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.551308, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.55323, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.553552, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.554471, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.558391, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.56234, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.564464, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.565242, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.566174, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5665019, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5675159, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5758579, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.578167, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5785348, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.579946, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.58032, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.581223, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.58209, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.583271, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5836082, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5838752, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.584299, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.584571, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5850089, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5852711, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.585648, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.585918, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5861251, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5865128, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.5934541, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6008031, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.602528, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6041899, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6053782, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.605706, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.605867, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.606281, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.606463, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.611434, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.615864, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6225102, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.623789, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6241221, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.624792, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625057, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625251, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6254442, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.625603, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6258318, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6259909, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.626668, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.62693, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.628712, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.629285, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.629807, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63053, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6308942, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.631285, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.631825, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6321821, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.633137, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6336472, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.633909, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63418, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.634453, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6355321, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6373062, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6378188, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.638164, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.63853, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.638835, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.639267, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.639553, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6404989, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.641108, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6413922, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.641782, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6422558, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.642634, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.643284, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.643911, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6443572, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.64466, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645021, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645167, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6455371, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.645806, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646227, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646407, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.646778, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6469781, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.64781, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6480699, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"<change_category>\": [{\"action\": \"<name>\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.648458, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6486568, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6490319, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.649228, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.650592, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.650763, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.65154, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.651774, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.651969, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6539361, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.654461, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.654939, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.655298, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6554399, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.655802, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656011, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656379, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.656577, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6577091, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.65796, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.658557, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.659515, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.660146, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6604002, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6606479, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.661003, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6611462, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.662261, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.662462, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.664104, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6643698, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.664682, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.66505, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.665251, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.665806, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666029, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666276, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.666858, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6673222, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.667722, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6680498, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6688168, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.670816, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6716192, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6720269, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6744618, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.676194, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677233, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6775522, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677862, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.677966, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.678944, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6797628, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68008, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.680588, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.681036, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68126, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.68159, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.681758, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6828642, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.683444, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.683711, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.684439, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.684801, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6849449, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.685508, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.685786, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686151, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686363, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6867359, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.686934, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6873422, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.687535, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.688407, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.688978, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6894479, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6896799, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690075, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690288, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690662, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.690897, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691238, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691461, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691802, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.691951, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6923468, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6925418, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.692876, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.693096, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694456, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694669, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.694897, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6951098, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.695392, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.695667, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6958919, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696158, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696384, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696604, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.696844, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.697058, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6972768, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6974878, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6978838, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.698067, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.698406, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.6985521, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699027, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699476, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.699679, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700398, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700626, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.700948, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.701324, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70152, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.702045, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7023842, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7027788, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.702966, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.703469, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.703722, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7039502, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.704203, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.704849, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70506, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7052631, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70541, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7056398, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.705746, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.706052, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.706282, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707399, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707598, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.707818, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7083972, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7086651, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.708859, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.709079, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.70926, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.711892, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712121, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712429, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.712831, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713176, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713624, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.713892, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.714118, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.714466, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715341, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715683, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.715879, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.716444, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.717024, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7174232, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.717751, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.720056, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7202182, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7204502, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.720602, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.721077, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.721333, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7214751, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7217898, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7220511, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7223868, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.722655, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.722969, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7240279, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7242901, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.724632, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7249548, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.726592, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727339, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727602, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.727795, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7286851, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7289228, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.729202, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.729436, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7298071, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.730485, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734237, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734608, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.734891, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.735255, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7355149, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.735737, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7359931, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7364202, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7367098, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.737128, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7373838, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.737608, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7378361, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.73805, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.738337, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.738574, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.741552, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.741784, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.742213, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.742526, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.74281, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7430599, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.744768, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745255, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745512, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.745993, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7463129, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7471611, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.747524, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.748637, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this) -%}\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{%- do column_name_to_data_types.update({column.name: column.dtype}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7529018, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * FROM dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- do format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in row.items() %} {{ column_value }} AS {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.753653, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n\n{#-- wrap yaml strings in quotes, apply cast --#}\n{%- for column_name, column_value in row.items() -%}\n{% set row_update = {column_name: column_value} %}\n{%- if column_value is string -%}\n{%- set row_update = {column_name: safe_cast(dbt.string_literal(column_value), column_name_to_data_types[column_name]) } -%}\n{%- elif column_value is none -%}\n{%- set row_update = {column_name: safe_cast('null', column_name_to_data_types[column_name]) } -%}\n{%- else -%}\n{%- set row_update = {column_name: safe_cast(column_value, column_name_to_data_types[column_name]) } -%}\n{%- endif -%}\n{%- do row.update(row_update) -%}\n{%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7547069, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.756869, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7570791, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.758251, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7588322, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.75966, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7603211, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7604249, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.761129, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.761452, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7618601, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1702489892.7623239, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1702489893.579107}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1702489893.629962, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1702489893.63043, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1702489893.632082, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1702489893.633114, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.059992, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17024898921033785545_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state", "enabled": false}, "created_at": 1702489893.159178, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17024898921033785545_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.2125812, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.275594, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17024898921033785545_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.503576, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1702489893.5801542}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1702489893.631113, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17024898921033785545_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1702489893.520305, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/1j/l_jt_2w16t5dnplmd2n0cr880000gq/T/pytest-of-gerda/pytest-106/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1702489893.668709}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17024898921033785545_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17024898921033785545_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1702489893.6660612, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} diff --git a/tests/functional/artifacts/data/state/v2/manifest.json b/tests/functional/artifacts/data/state/v2/manifest.json deleted file mode 100644 index 25532471a..000000000 --- a/tests/functional/artifacts/data/state/v2/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v2.json", "dbt_version": "0.20.2", "generated_at": "2022-06-08T05:12:43.870174Z", "invocation_id": "b9b21a26-1804-47f9-866b-620501fe5540", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "materialized": "view", "persist_docs": {}, "vars": {}, "quoting": {}, "column_types": {}, "alias": null, "schema": null, "database": null, "tags": [], "full_refresh": null, "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1654665164}}, "sources": {}, "macros": {"macro.test.drop_relation": {"unique_id": "macro.test.drop_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(dbt_labs_materialized_views.drop_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.postgres__list_relations_without_caching": {"unique_id": "macro.test.postgres__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.postgres__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.postgres_get_relations": {"unique_id": "macro.test.postgres_get_relations", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(dbt_labs_materialized_views.postgres_get_relations()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres_get_relations"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.redshift__list_relations_without_caching": {"unique_id": "macro.test.redshift__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "redshift__list_relations_without_caching", "macro_sql": "{% macro redshift__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.redshift__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.redshift__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.test.load_relation": {"unique_id": "macro.test.load_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(dbt_labs_materialized_views.redshift_load_relation_or_mv(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence = 'p' -- [p]ermanent table. Other values are [u]nlogged table, [t]emporary table\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% for column_name in column_dict %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "materialization_test_default", "macro_sql": "\n\n{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.column_list": {"unique_id": "macro.dbt.column_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list", "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.column_list_for_create_table": {"unique_id": "macro.dbt.column_list_for_create_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list_for_create_table", "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n ;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/fishtown-analytics/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665163}, "macro.dbt.basic_load_csv_rows": {"unique_id": "macro.dbt.basic_load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "basic_load_csv_rows", "macro_sql": "{% macro basic_load_csv_rows(model, batch_size, agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_seed_column_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n {{ return(basic_load_csv_rows(model, 10000, agate_table) )}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.basic_load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.incremental_upsert": {"unique_id": "macro.dbt.incremental_upsert", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/helpers.sql", "original_file_path": "macros/materializations/incremental/helpers.sql", "name": "incremental_upsert", "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/incremental.sql", "original_file_path": "macros/materializations/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif existing_relation.is_view or should_full_refresh() %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% do adapter.drop_relation(intermediate_relation) %}\n {% do adapter.drop_relation(backup_relation) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.load_relation", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.should_full_refresh", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.common_get_delete_insert_merge_sql": {"unique_id": "macro.dbt.common_get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "common_get_delete_insert_merge_sql", "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n );\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.common_get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/table/table.sql", "original_file_path": "macros/materializations/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/view.sql", "original_file_path": "macros/materializations/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', macro_namespace = 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view(run_outside_transaction_hooks=True) %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {% if run_outside_transaction_hooks %}\n -- no transactions on BigQuery\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n {% endif %}\n\n -- `BEGIN` happens here on Snowflake\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if run_outside_transaction_hooks %}\n -- No transactions on BigQuery\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n {% endif %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/query.sql", "original_file_path": "macros/etc/query.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/is_incremental.sql", "original_file_path": "macros/etc/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by 1\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665164}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support."}}, "exposures": {}, "selectors": {}, "disabled": [], "parent_map": {"model.test.my_model": []}, "child_map": {"model.test.my_model": []}} diff --git a/tests/functional/artifacts/data/state/v3/manifest.json b/tests/functional/artifacts/data/state/v3/manifest.json deleted file mode 100644 index a9eb02068..000000000 --- a/tests/functional/artifacts/data/state/v3/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v3.json", "dbt_version": "0.21.1", "generated_at": "2022-06-08T05:12:26.978818Z", "invocation_id": "a2594229-14b7-46fe-864f-37cabb5f5f65", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "on_schema_change": "ignore", "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1654665147}}, "sources": {}, "macros": {"macro.test.drop_relation": {"unique_id": "macro.test.drop_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(dbt_labs_materialized_views.drop_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.postgres__list_relations_without_caching": {"unique_id": "macro.test.postgres__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.postgres__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.postgres_get_relations": {"unique_id": "macro.test.postgres_get_relations", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(dbt_labs_materialized_views.postgres_get_relations()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres_get_relations"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.redshift__list_relations_without_caching": {"unique_id": "macro.test.redshift__list_relations_without_caching", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "redshift__list_relations_without_caching", "macro_sql": "{% macro redshift__list_relations_without_caching(schema_relation) %}\n {{ return(dbt_labs_materialized_views.redshift__list_relations_without_caching(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.redshift__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.test.load_relation": {"unique_id": "macro.test.load_relation", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "macros/whatever.sql", "original_file_path": "macros/whatever.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(dbt_labs_materialized_views.redshift_load_relation_or_mv(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence = 'p' -- [p]ermanent table. Other values are [u]nlogged table, [t]emporary table\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665146}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% for column_name in column_dict %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/product/dbt-core/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/core.sql", "original_file_path": "macros/core.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/test.sql", "original_file_path": "macros/materializations/test.sql", "name": "materialization_test_default", "macro_sql": "\n\n{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.column_list": {"unique_id": "macro.dbt.column_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list", "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.column_list_for_create_table": {"unique_id": "macro.dbt.column_list_for_create_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "column_list_for_create_table", "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/helpers.sql", "original_file_path": "macros/materializations/helpers.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/strategies.sql", "original_file_path": "macros/materializations/snapshot/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/snapshot/snapshot.sql", "original_file_path": "macros/materializations/snapshot/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/seed/seed.sql", "original_file_path": "macros/materializations/seed/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.incremental_upsert": {"unique_id": "macro.dbt.incremental_upsert", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/helpers.sql", "original_file_path": "macros/materializations/incremental/helpers.sql", "name": "incremental_upsert", "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n \n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/incremental.sql", "original_file_path": "macros/materializations/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) %} \n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% do process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n \n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n \n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n \n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n \n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n \n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n \n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n \n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} \n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n \n {% set schema_changed = False %}\n \n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n \n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n \n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n \n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n \n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n \n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} \n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n \n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n \n {% do log(schema_change_message) %}\n \n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n \n {% if on_schema_change != 'ignore' %}\n \n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n \n {% if schema_changes_dict['schema_changed'] %}\n \n {% if on_schema_change == 'fail' %}\n \n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways: \n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n \n {% do exceptions.raise_compiler_error(fail_msg) %}\n \n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n \n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {% endif %}\n \n {% endif %}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.common_get_delete_insert_merge_sql": {"unique_id": "macro.dbt.common_get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "common_get_delete_insert_merge_sql", "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.common_get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/common/merge.sql", "original_file_path": "macros/materializations/common/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/table/table.sql", "original_file_path": "macros/materializations/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/view.sql", "original_file_path": "macros/materializations/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/materializations/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_alias.sql", "original_file_path": "macros/etc/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/query.sql", "original_file_path": "macros/etc/query.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/is_incremental.sql", "original_file_path": "macros/etc/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/where_subquery.sql", "original_file_path": "macros/etc/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/where_subquery.sql", "original_file_path": "macros/etc/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_schema.sql", "original_file_path": "macros/etc/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/etc/get_custom_database.sql", "original_file_path": "macros/etc/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.test.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/adapters/common.sql", "original_file_path": "macros/adapters/common.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n \n {% set sql -%}\n \n alter {{ relation.type }} {{ relation }}\n \n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n \n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n \n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/relationships.sql", "original_file_path": "macros/schema_tests/relationships.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/not_null.sql", "original_file_path": "macros/schema_tests/not_null.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/unique.sql", "original_file_path": "macros/schema_tests/unique.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "macros/schema_tests/accepted_values.sql", "original_file_path": "macros/schema_tests/accepted_values.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1654665147}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/product/dbt-core/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support."}}, "exposures": {}, "selectors": {}, "disabled": [], "parent_map": {"model.test.my_model": []}, "child_map": {"model.test.my_model": []}} diff --git a/tests/functional/artifacts/data/state/v4/manifest.json b/tests/functional/artifacts/data/state/v4/manifest.json deleted file mode 100644 index 67b7c244c..000000000 --- a/tests/functional/artifacts/data/state/v4/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v4.json", "dbt_version": "1.0.8", "generated_at": "2022-09-13T08:43:20.641750Z", "invocation_id": "5da6faab-41cb-4180-ab19-8375c0e1f1a5", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "on_schema_change": "ignore", "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1663058601.2387}}, "sources": {}, "macros": {"macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.6944451}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.696331}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.712787}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.714135}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.714985}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7158241}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.717091}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.718103}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7185578}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7194948}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.720533}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.720746}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.72122}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.721492}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7240572}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7252119}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7258098}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.727401}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.729517}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.732625}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7331321}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7335098}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.733889}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7342541}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.735413}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.736206}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.737014}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.738338}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7390552}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7487428}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7491798}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.74975}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7501109}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7503612}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7524228}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.752844}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.753275}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.755879}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif %}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.760828}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7705312}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7712681}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.771702}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.77192}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.772458}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7748082}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.775297}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.775959}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.777101}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7941692}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.7998898}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.801226}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8020391}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.803186}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.804178}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n \n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.806505}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n \n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n \n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8079581}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n \n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} \n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.809567}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8179982}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8210711}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.821682}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8228061}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.823494}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.825151}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8269792}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) %} \n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns) %}\n \n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.get_delete_insert_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.840609}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n \n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n \n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n \n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8552}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n \n {% set schema_changed = False %}\n \n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n \n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n \n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.858468}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n \n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n \n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} \n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n \n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n \n {% do log(schema_change_message) %}\n \n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8616462}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n \n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n \n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n \n {% if schema_changes_dict['schema_changed'] %}\n \n {% if on_schema_change == 'fail' %}\n \n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways: \n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n \n {% do exceptions.raise_compiler_error(fail_msg) %}\n \n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n \n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n \n {% endif %}\n \n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n \n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8635662}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.874618}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8760588}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.876541}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.877064}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n \n {{ sql_header if sql_header is not none }}\n \n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.878151}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier, \n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.888351}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.889299}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.889876}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.895395}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8964942}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.8969111}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.897373}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.898056}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.905844}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9181492}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.920773}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.921389}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.922673}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9230442}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.923341}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.923764}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.924055}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.925326}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9259362}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9291992}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9304068}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.930997}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.93258}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.933256}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.933998}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.93523}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.93588}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9369469}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9375691}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9383721}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.940022}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.943184}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.944719}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.94549}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.950737}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.954061}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.956046}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.956654}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9578362}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9583108}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9587321}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9592059}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9605691}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9609149}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9613152}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.962351}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9670699}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.967857}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9683268}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.968875}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.969353}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9697769}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9703019}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.971044}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9716868}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.973294}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.973898}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.974398}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9758022}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9761899}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9767869}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.977885}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.979867}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.98031}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.980829}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9812522}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.981956}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9832299}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9874718}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9881449}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.98862}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.989036}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.989502}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.990141}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9906712}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.991599}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.992081}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9924932}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9975512}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.997951}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9987469}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058600.9992452}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.000154}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.0007622}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.0023592}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.003041}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n \n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n \n {% set sql -%}\n \n alter {{ relation.type }} {{ relation }}\n \n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n \n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n \n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.005121}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.006654}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.007256}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.007999}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058601.008718}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "metric.yml", "original_file_path": "models/metric.yml", "model": "ref('my_model')", "name": "my_metric", "description": "", "label": "Count records", "type": "count", "sql": "*", "timestamp": "updated_at", "filters": [], "time_grains": ["day"], "dimensions": [], "resource_type": "metric", "meta": {}, "tags": [], "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "created_at": 1663058601.2723079}}, "selectors": {}, "disabled": {}, "parent_map": {"model.test.my_model": [], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["metric.test.my_metric"], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v5/manifest.json b/tests/functional/artifacts/data/state/v5/manifest.json deleted file mode 100644 index d6662b2a6..000000000 --- a/tests/functional/artifacts/data/state/v5/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v5.json", "dbt_version": "1.1.2", "generated_at": "2022-09-13T08:43:05.173401Z", "invocation_id": "46690f0c-35b6-44f7-95bc-3a91cbf87484", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1663058585.790391}}, "sources": {}, "macros": {"macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2411761}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2429922}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.258873}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.260246}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.261101}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2619379}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.263221}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.264239}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.264697}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.265624}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2666838}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.266898}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2673979}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.267664}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix_length = suffix|length + dtstring|length %}\n {% set relation_max_name_length = 63 %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Temp relation suffix is too long (' ~ suffix|length ~ ' characters). Maximum length is ' ~ (relation_max_name_length - dtstring|length) ~ ' characters.') %}\n {% endif %}\n {% set tmp_identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix ~ dtstring %}\n {% do return(base_relation.incorporate(\n path={\n \"identifier\": tmp_identifier,\n \"schema\": none,\n \"database\": none\n })) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.270232}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.271394}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.271986}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.273597}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.275718}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.278846}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2793732}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2797408}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.280107}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.280468}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.28161}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.28241}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.283215}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.284517}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.285232}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2955132}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.295966}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.296581}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2969642}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.297218}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.29933}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.2997508}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3001878}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3028228}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3070579}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3179069}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.318669}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3191}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3193212}, "macro.dbt.get_true_sql": {"unique_id": "macro.dbt.get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.319695}, "macro.dbt.default__get_true_sql": {"unique_id": "macro.dbt.default__get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.319982}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.320519}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.322867}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.323358}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.324033}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.325182}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.34037}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.346029}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3473558}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.348177}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.349312}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.350326}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.352607}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.35407}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.3557}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.370045}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.374055}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.401227}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last }}\n {% endfor %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4033751}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4041102}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.405853}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4078321}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n -- the intermediate_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {% set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,\n schema=schema,\n database=database) %}\n {% set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {# -- first check whether we want to full refresh for source view or config reasons #}\n {% set trigger_full_refresh = (full_refresh_mode or existing_relation.is_view) %}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n{% elif trigger_full_refresh %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + '__dbt_backup' %}\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.make_temp_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.get_delete_insert_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.421077}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.434217}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.43755}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.440778}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.442719}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,\n schema=schema,\n database=database) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.452991}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.454284}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.454771}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.455303}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.456414}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = adapter.get_relation(identifier=tmp_identifier,\n schema=schema,\n database=database) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = adapter.get_relation(identifier=backup_identifier,\n schema=schema,\n database=database) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(old_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4662242}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.467168}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.46778}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4711108}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4722078}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.472634}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.473103}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.473792}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4816241}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.493353}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.495795}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.4964218}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.497696}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.498073}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.498365}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.498788}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.499098}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.500374}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.500983}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5042381}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5054028}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.505999}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.507547}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.508227}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.508971}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5101619}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.510827}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.511889}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5128162}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5136151}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.515251}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.518385}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.519926}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5207071}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.525802}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.52915}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.53116}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5317729}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.532931}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.533409}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.533822}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5342898}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5356438}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.536011}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.536417}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5374599}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.542093}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5428941}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.543366}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.543921}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.544389}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.544798}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5453591}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.546091}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.546772}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.548408}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.54901}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.549522}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.550913}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.551292}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5518851}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.552993}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.554944}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5553741}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5559008}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.556353}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.557066}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.55835}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5624719}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.563156}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.563638}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.564032}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.564497}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.565144}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.565687}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.566637}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5671191}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.567532}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.572185}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.572603}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5734131}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.573897}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5747972}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.5754082}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.577007}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.577695}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.579798}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.581336}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.581941}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.582701}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058585.583425}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "metric.yml", "original_file_path": "models/metric.yml", "model": "ref('my_model')", "name": "my_metric", "description": "", "label": "Count records", "type": "count", "sql": "*", "timestamp": "updated_at", "filters": [], "time_grains": ["day"], "dimensions": [], "resource_type": "metric", "meta": {}, "tags": [], "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "created_at": 1663058585.822956}}, "selectors": {}, "disabled": {}, "parent_map": {"model.test.my_model": [], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["metric.test.my_metric"], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v6/manifest.json b/tests/functional/artifacts/data/state/v6/manifest.json deleted file mode 100644 index e98ee672b..000000000 --- a/tests/functional/artifacts/data/state/v6/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v6.json", "dbt_version": "1.2.1", "generated_at": "2022-09-13T08:42:50.298210Z", "invocation_id": "aa834731-46c3-49aa-8ec8-956dae621b58", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"raw_sql": "select 1 as id", "resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "post-hook": [], "pre-hook": []}, "database": "jerco", "schema": "dbt_jcohen", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "479636cb85ce8d3b0f8db5ff13cf338b61254ad98d905630eac61f963e719e9d"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1663058263.145605}}, "sources": {}, "macros": {"macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.441694}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.443508}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4609761}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.462327}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4631748}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.46401}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.465282}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.46641}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4668732}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4678211}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4688902}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.46909}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.469568}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.469843}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.472267}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.472806}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.473689}, "macro.dbt_postgres.postgres__make_backup_relation": {"unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4744241}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.475544}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4761422}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.477773}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.478305}, "macro.dbt_postgres.postgres__copy_grants": {"unique_id": "macro.dbt_postgres.postgres__copy_grants", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.478604}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.480701}, "macro.dbt_postgres.postgres__dateadd": {"unique_id": "macro.dbt_postgres.postgres__dateadd", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.481434}, "macro.dbt_postgres.postgres__listagg": {"unique_id": "macro.dbt_postgres.postgres__listagg", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.483254}, "macro.dbt_postgres.postgres__datediff": {"unique_id": "macro.dbt_postgres.postgres__datediff", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.492169}, "macro.dbt_postgres.postgres__any_value": {"unique_id": "macro.dbt_postgres.postgres__any_value", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4927092}, "macro.dbt_postgres.postgres__last_day": {"unique_id": "macro.dbt_postgres.postgres__last_day", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.494153}, "macro.dbt_postgres.postgres__split_part": {"unique_id": "macro.dbt_postgres.postgres__split_part", "package_name": "dbt_postgres", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.495394}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.498407}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4989262}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.499295}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.4996538}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5000162}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5011232}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.50192}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.502719}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.50401}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.504728}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.51448}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.51492}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5155}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_get_time", "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.515872}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.516114}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.518018}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5184388}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5188859}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_sql'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.522562}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.526156}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.53722}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.537965}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.538402}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.538624}, "macro.dbt.get_true_sql": {"unique_id": "macro.dbt.get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.538997}, "macro.dbt.default__get_true_sql": {"unique_id": "macro.dbt.default__get_true_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.539289}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.539823}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.542159}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5426402}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.543298}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5444548}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.561558}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5671492}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5684512}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.569262}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.570368}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5713542}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5735822}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.5750248}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.576639}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.591182}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.595134}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.622782}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last }}\n {% endfor %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.624852}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.625576}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.627269}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6292028}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {% set build_sql = get_delete_insert_merge_sql(target_relation, temp_relation, unique_key, dest_columns) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.get_delete_insert_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6413598}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.654586}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6579268}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6611688}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.663094}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.670167}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6714208}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.671903}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.67243}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.673532}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.680615}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6815622}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.682152}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.686346}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.687447}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6878788}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.688359}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.6890602}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.698056}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7117178}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7141619}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.714784}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.716077}, "macro.dbt.get_csv_sql": {"unique_id": "macro.dbt.get_csv_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.716581}, "macro.dbt.default__get_csv_sql": {"unique_id": "macro.dbt.default__get_csv_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.716925}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.717283}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.717576}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.717988}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.718283}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.719548}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.720028}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.723425}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.724572}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.725169}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.72671}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.727396}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.728158}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.729355}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.73002}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7310588}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.731988}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.732775}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.734423}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.737539}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.739098}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.739882}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.744986}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7483132}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.750348}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.750963}, "macro.dbt.except": {"unique_id": "macro.dbt.except", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7517078}, "macro.dbt.default__except": {"unique_id": "macro.dbt.default__except", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.751895}, "macro.dbt.replace": {"unique_id": "macro.dbt.replace", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.752911}, "macro.dbt.default__replace": {"unique_id": "macro.dbt.default__replace", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.753338}, "macro.dbt.concat": {"unique_id": "macro.dbt.concat", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7541351}, "macro.dbt.default__concat": {"unique_id": "macro.dbt.default__concat", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.754451}, "macro.dbt.length": {"unique_id": "macro.dbt.length", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.75524}, "macro.dbt.default__length": {"unique_id": "macro.dbt.default__length", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.755511}, "macro.dbt.dateadd": {"unique_id": "macro.dbt.dateadd", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7565598}, "macro.dbt.default__dateadd": {"unique_id": "macro.dbt.default__dateadd", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.756995}, "macro.dbt.intersect": {"unique_id": "macro.dbt.intersect", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7577422}, "macro.dbt.default__intersect": {"unique_id": "macro.dbt.default__intersect", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7579389}, "macro.dbt.escape_single_quotes": {"unique_id": "macro.dbt.escape_single_quotes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7587788}, "macro.dbt.default__escape_single_quotes": {"unique_id": "macro.dbt.default__escape_single_quotes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.75913}, "macro.dbt.right": {"unique_id": "macro.dbt.right", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.760025}, "macro.dbt.default__right": {"unique_id": "macro.dbt.default__right", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.760372}, "macro.dbt.listagg": {"unique_id": "macro.dbt.listagg", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.762194}, "macro.dbt.default__listagg": {"unique_id": "macro.dbt.default__listagg", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7634282}, "macro.dbt.datediff": {"unique_id": "macro.dbt.datediff", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.764456}, "macro.dbt.default__datediff": {"unique_id": "macro.dbt.default__datediff", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.764895}, "macro.dbt.safe_cast": {"unique_id": "macro.dbt.safe_cast", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.76579}, "macro.dbt.default__safe_cast": {"unique_id": "macro.dbt.default__safe_cast", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7661529}, "macro.dbt.hash": {"unique_id": "macro.dbt.hash", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.766952}, "macro.dbt.default__hash": {"unique_id": "macro.dbt.default__hash", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.767371}, "macro.dbt.cast_bool_to_text": {"unique_id": "macro.dbt.cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7681432}, "macro.dbt.default__cast_bool_to_text": {"unique_id": "macro.dbt.default__cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.768564}, "macro.dbt.any_value": {"unique_id": "macro.dbt.any_value", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.769363}, "macro.dbt.default__any_value": {"unique_id": "macro.dbt.default__any_value", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.769631}, "macro.dbt.position": {"unique_id": "macro.dbt.position", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.770546}, "macro.dbt.default__position": {"unique_id": "macro.dbt.default__position", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7708979}, "macro.dbt.string_literal": {"unique_id": "macro.dbt.string_literal", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.77168}, "macro.dbt.default__string_literal": {"unique_id": "macro.dbt.default__string_literal", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.771949}, "macro.dbt.type_string": {"unique_id": "macro.dbt.type_string", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.775103}, "macro.dbt.default__type_string": {"unique_id": "macro.dbt.default__type_string", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7755132}, "macro.dbt.type_timestamp": {"unique_id": "macro.dbt.type_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7759519}, "macro.dbt.default__type_timestamp": {"unique_id": "macro.dbt.default__type_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.776351}, "macro.dbt.type_float": {"unique_id": "macro.dbt.type_float", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7769148}, "macro.dbt.default__type_float": {"unique_id": "macro.dbt.default__type_float", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.777309}, "macro.dbt.type_numeric": {"unique_id": "macro.dbt.type_numeric", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7777238}, "macro.dbt.default__type_numeric": {"unique_id": "macro.dbt.default__type_numeric", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.778186}, "macro.dbt.type_bigint": {"unique_id": "macro.dbt.type_bigint", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7786}, "macro.dbt.default__type_bigint": {"unique_id": "macro.dbt.default__type_bigint", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.778995}, "macro.dbt.type_int": {"unique_id": "macro.dbt.type_int", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7794151}, "macro.dbt.default__type_int": {"unique_id": "macro.dbt.default__type_int", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.779795}, "macro.dbt.bool_or": {"unique_id": "macro.dbt.bool_or", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.780659}, "macro.dbt.default__bool_or": {"unique_id": "macro.dbt.default__bool_or", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.780927}, "macro.dbt.last_day": {"unique_id": "macro.dbt.last_day", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.782002}, "macro.dbt.default_last_day": {"unique_id": "macro.dbt.default_last_day", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.782704}, "macro.dbt.default__last_day": {"unique_id": "macro.dbt.default__last_day", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7830899}, "macro.dbt.split_part": {"unique_id": "macro.dbt.split_part", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7846441}, "macro.dbt.default__split_part": {"unique_id": "macro.dbt.default__split_part", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7850811}, "macro.dbt._split_part_negative": {"unique_id": "macro.dbt._split_part_negative", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "_split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.785673}, "macro.dbt.date_trunc": {"unique_id": "macro.dbt.date_trunc", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7865448}, "macro.dbt.default__date_trunc": {"unique_id": "macro.dbt.default__date_trunc", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.786887}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.788033}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7884989}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.788918}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.7895281}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.79088}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.791239}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.791649}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.792712}, "macro.dbt.make_intermediate_relation": {"unique_id": "macro.dbt.make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.80177}, "macro.dbt.default__make_intermediate_relation": {"unique_id": "macro.dbt.default__make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.802206}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.802784}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8035462}, "macro.dbt.make_backup_relation": {"unique_id": "macro.dbt.make_backup_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.804203}, "macro.dbt.default__make_backup_relation": {"unique_id": "macro.dbt.default__make_backup_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.805045}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.805522}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8060842}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.806554}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.806978}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8075058}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.808251}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.808909}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.810538}, "macro.dbt.load_cached_relation": {"unique_id": "macro.dbt.load_cached_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8111548}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.811518}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.812018}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "current_timestamp", "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.813409}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.813792}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8143969}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.815532}, "macro.dbt.copy_grants": {"unique_id": "macro.dbt.copy_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.820182}, "macro.dbt.default__copy_grants": {"unique_id": "macro.dbt.default__copy_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.820483}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.820917}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8212}, "macro.dbt.should_revoke": {"unique_id": "macro.dbt.should_revoke", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8221052}, "macro.dbt.get_show_grant_sql": {"unique_id": "macro.dbt.get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8225951}, "macro.dbt.default__get_show_grant_sql": {"unique_id": "macro.dbt.default__get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.822855}, "macro.dbt.get_grant_sql": {"unique_id": "macro.dbt.get_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.823464}, "macro.dbt.default__get_grant_sql": {"unique_id": "macro.dbt.default__get_grant_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.823967}, "macro.dbt.get_revoke_sql": {"unique_id": "macro.dbt.get_revoke_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8245819}, "macro.dbt.default__get_revoke_sql": {"unique_id": "macro.dbt.default__get_revoke_sql", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.825077}, "macro.dbt.get_dcl_statement_list": {"unique_id": "macro.dbt.get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.825684}, "macro.dbt.default__get_dcl_statement_list": {"unique_id": "macro.dbt.default__get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.827622}, "macro.dbt.call_dcl_statements": {"unique_id": "macro.dbt.call_dcl_statements", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.828118}, "macro.dbt.default__call_dcl_statements": {"unique_id": "macro.dbt.default__call_dcl_statements", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.828738}, "macro.dbt.apply_grants": {"unique_id": "macro.dbt.apply_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8293319}, "macro.dbt.default__apply_grants": {"unique_id": "macro.dbt.default__apply_grants", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8324008}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.834379}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.834809}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.835356}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.835799}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8365178}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.837826}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.841973}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.842654}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.843149}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8435562}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8440409}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8447008}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.845243}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.846046}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.846539}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.846957}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.851595}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.852285}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.853091}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.853559}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.854439}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.85503}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.856618}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8572822}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.8593512}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.87712}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.877723}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.878474}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true}, "patch_path": null, "arguments": [], "created_at": 1663058262.879189}}, "docs": {"dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/jerco/dev/scratch/testy/env/lib/python3.9/site-packages/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/Users/jerco/dev/scratch/testy", "path": "metric.yml", "original_file_path": "models/metric.yml", "name": "my_metric", "description": "", "label": "Count records", "type": "count", "sql": "*", "timestamp": "updated_at", "filters": [], "time_grains": ["day"], "dimensions": [], "model": "ref('my_model')", "model_unique_id": null, "resource_type": "metric", "meta": {}, "tags": [], "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "metrics": [], "created_at": 1663058517.2551522}}, "selectors": {}, "disabled": {}, "parent_map": {"model.test.my_model": [], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["metric.test.my_metric"], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v7/manifest.json b/tests/functional/artifacts/data/state/v7/manifest.json deleted file mode 100644 index e8529bd3c..000000000 --- a/tests/functional/artifacts/data/state/v7/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", "dbt_version": "1.3.2", "generated_at": "2023-02-13T21:34:36.870255Z", "invocation_id": "96c0aa43-0ccd-4420-a50c-05c0f22a0df1", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "my_model"], "unique_id": "model.test.my_model", "raw_code": "select 1 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "name": "my_model", "alias": "my_model", "checksum": {"name": "sha256", "checksum": "2b9123e04ab8bb798f7c565afdc3ee0e56fcd66b4bfbdb435b4891c878d947c5"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.566336}, "snapshot.test.snapshot_seed": {"resource_type": "snapshot", "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763240740000063267_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "unique_id": "snapshot.test.snapshot_seed", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "name": "snapshot_seed", "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "00c13c306831070996970605fbc4c901aa456e1ed1c028725a932e4e6a4ffb0a"}, "tags": [], "refs": [["my_seed"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763240740000063267_test_previous_version_state"}, "created_at": 1676324075.423856}, "analysis.test.a": {"resource_type": "analysis", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "analysis", "a"], "unique_id": "analysis.test.a", "raw_code": "select 4 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "name": "a", "alias": "a", "checksum": {"name": "sha256", "checksum": "bd1ee600e4e80d03f488fee52a66e8d51b5be2b98acc20df1cf8be4670d86ae5"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.453177}, "test.test.just_my": {"resource_type": "test", "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "just_my"], "unique_id": "test.test.just_my", "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "name": "just_my", "alias": "just_my", "checksum": {"name": "sha256", "checksum": "f30b7a814e0e3761d1a8042aa40d658d6c33affb28cd92782b0f56559c414fd8"}, "tags": ["data_test_tag"], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1676324075.520421}, "seed.test.my_seed": {"resource_type": "seed", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "my_seed"], "unique_id": "seed.test.my_seed", "raw_code": "", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "name": "my_seed", "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "22697c9b76d73a6c7561554ddb2ce101428ea2737ba8dc500d52ebcfdcfcfc13"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.542836}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "resource_type": "test", "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "not_null_my_model_id"], "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "name": "not_null_my_model_id", "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.575407, "column_name": "id", "file_key_name": "models.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "resource_type": "test", "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "check_nothing_my_model_"], "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "name": "check_nothing_my_model_", "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676324075.577614, "column_name": null, "file_key_name": "models.my_model"}}, "sources": {"source.test.my_source.my_table": {"fqn": ["test", "my_source", "my_table"], "database": "dbt", "schema": "my_source", "unique_id": "source.test.my_source.my_table", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "name": "my_table", "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "resource_type": "source", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1676324075.624893}}, "macros": {"macro.test.test_check_nothing": {"unique_id": "macro.test.test_check_nothing", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "name": "test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.712246, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"unique_id": "macro.test.test_disabled_check_nothing", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "name": "test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.713166, "supported_languages": null}, "macro.test.do_nothing": {"unique_id": "macro.test.do_nothing", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "name": "do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7140381, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.714999, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.715521, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7158241, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.716109, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "name": "postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7164018, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"unique_id": "macro.dbt_postgres.postgres__get_catalog", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "name": "postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.719163, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"unique_id": "macro.dbt_postgres.postgres_get_relations", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "name": "postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7208161, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"unique_id": "macro.dbt_postgres.postgres__create_table_as", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.733047, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.734891, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"unique_id": "macro.dbt_postgres.postgres__create_schema", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.735866, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"unique_id": "macro.dbt_postgres.postgres__drop_schema", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.736712, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.738183, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.739969, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"unique_id": "macro.dbt_postgres.postgres__information_schema_name", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.740651, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"unique_id": "macro.dbt_postgres.postgres__list_schemas", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.741567, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.742538, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.744768, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.745294, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.746094, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.746795, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"unique_id": "macro.dbt_postgres.postgres_escape_comment", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.747864, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7484329, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7500181, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.750537, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"unique_id": "macro.dbt_postgres.postgres__copy_grants", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "name": "postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.750845, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "name": "postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.752257, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "name": "postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.754163, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"unique_id": "macro.dbt_postgres.postgres__dateadd", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.755116, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"unique_id": "macro.dbt_postgres.postgres__listagg", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.757173, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"unique_id": "macro.dbt_postgres.postgres__datediff", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.765253, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"unique_id": "macro.dbt_postgres.postgres__any_value", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.766171, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"unique_id": "macro.dbt_postgres.postgres__last_day", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.767721, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"unique_id": "macro.dbt_postgres.postgres__split_part", "package_name": "dbt_postgres", "root_path": "/Users/gerda/FTA/dbt/plugins/postgres/dbt/include/postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7690408, "supported_languages": null}, "macro.dbt.run_hooks": {"unique_id": "macro.dbt.run_hooks", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7719731, "supported_languages": null}, "macro.dbt.make_hook_config": {"unique_id": "macro.dbt.make_hook_config", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.772469, "supported_languages": null}, "macro.dbt.before_begin": {"unique_id": "macro.dbt.before_begin", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7729542, "supported_languages": null}, "macro.dbt.in_transaction": {"unique_id": "macro.dbt.in_transaction", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.773334, "supported_languages": null}, "macro.dbt.after_commit": {"unique_id": "macro.dbt.after_commit", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "name": "after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.773783, "supported_languages": null}, "macro.dbt.set_sql_header": {"unique_id": "macro.dbt.set_sql_header", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7750452, "supported_languages": null}, "macro.dbt.should_full_refresh": {"unique_id": "macro.dbt.should_full_refresh", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.775828, "supported_languages": null}, "macro.dbt.should_store_failures": {"unique_id": "macro.dbt.should_store_failures", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "name": "should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.776683, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"unique_id": "macro.dbt.snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.778024, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"unique_id": "macro.dbt.default__snapshot_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "name": "default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.778701, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"unique_id": "macro.dbt.strategy_dispatch", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.786231, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"unique_id": "macro.dbt.snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.78667, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"unique_id": "macro.dbt.default__snapshot_hash_arguments", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.787231, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"unique_id": "macro.dbt.snapshot_timestamp_strategy", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.788982, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"unique_id": "macro.dbt.snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.789396, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"unique_id": "macro.dbt.default__snapshot_string_as_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.789981, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.793568, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"unique_id": "macro.dbt.snapshot_check_strategy", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "name": "snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.7968981, "supported_languages": null}, "macro.dbt.create_columns": {"unique_id": "macro.dbt.create_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.80455, "supported_languages": null}, "macro.dbt.default__create_columns": {"unique_id": "macro.dbt.default__create_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.805295, "supported_languages": null}, "macro.dbt.post_snapshot": {"unique_id": "macro.dbt.post_snapshot", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.805722, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"unique_id": "macro.dbt.default__post_snapshot", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.805967, "supported_languages": null}, "macro.dbt.get_true_sql": {"unique_id": "macro.dbt.get_true_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8063412, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"unique_id": "macro.dbt.default__get_true_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.806646, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"unique_id": "macro.dbt.snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.807165, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"unique_id": "macro.dbt.default__snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.80939, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"unique_id": "macro.dbt.build_snapshot_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.809878, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"unique_id": "macro.dbt.default__build_snapshot_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.810522, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"unique_id": "macro.dbt.build_snapshot_staging_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "name": "build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.811589, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"unique_id": "macro.dbt.materialization_snapshot_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "name": "materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.826415, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"unique_id": "macro.dbt.materialization_test_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "name": "materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.831668, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"unique_id": "macro.dbt.get_test_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8331032, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"unique_id": "macro.dbt.default__get_test_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "name": "default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8338718, "supported_languages": null}, "macro.dbt.get_where_subquery": {"unique_id": "macro.dbt.get_where_subquery", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8350341, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"unique_id": "macro.dbt.default__get_where_subquery", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "name": "default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.835979, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"unique_id": "macro.dbt.get_quoted_csv", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8394618, "supported_languages": null}, "macro.dbt.diff_columns": {"unique_id": "macro.dbt.diff_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8409832, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"unique_id": "macro.dbt.diff_column_data_types", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.842655, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"unique_id": "macro.dbt.get_merge_update_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8432481, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"unique_id": "macro.dbt.default__get_merge_update_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "name": "default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.845012, "supported_languages": null}, "macro.dbt.get_merge_sql": {"unique_id": "macro.dbt.get_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8534558, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"unique_id": "macro.dbt.default__get_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.857812, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"unique_id": "macro.dbt.get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.85844, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last }}\n {% endfor %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8602839, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.860947, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "name": "default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.862558, "supported_languages": null}, "macro.dbt.is_incremental": {"unique_id": "macro.dbt.is_incremental", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "name": "is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.864465, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"unique_id": "macro.dbt.get_incremental_append_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.866665, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"unique_id": "macro.dbt.default__get_incremental_append_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.867279, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"unique_id": "macro.dbt.get_incremental_delete_insert_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.867753, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.868414, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"unique_id": "macro.dbt.get_incremental_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.868883, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"unique_id": "macro.dbt.default__get_incremental_merge_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8695412, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.870015, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"predicates\"])) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8706748, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"unique_id": "macro.dbt.get_incremental_default_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8712032, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"unique_id": "macro.dbt.default__get_incremental_default_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.871608, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"unique_id": "macro.dbt.get_insert_into_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "name": "get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.872406, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"unique_id": "macro.dbt.materialization_incremental_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "name": "materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8839822, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"unique_id": "macro.dbt.incremental_validate_on_schema_change", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.894743, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"unique_id": "macro.dbt.check_for_schema_changes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.8977559, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"unique_id": "macro.dbt.sync_column_schemas", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.900897, "supported_languages": null}, "macro.dbt.process_schema_changes": {"unique_id": "macro.dbt.process_schema_changes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "name": "process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9030159, "supported_languages": null}, "macro.dbt.materialization_table_default": {"unique_id": "macro.dbt.materialization_table_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "name": "materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.909476, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"unique_id": "macro.dbt.get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.911144, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"unique_id": "macro.dbt.default__get_create_table_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.911638, "supported_languages": null}, "macro.dbt.create_table_as": {"unique_id": "macro.dbt.create_table_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.912726, "supported_languages": null}, "macro.dbt.default__create_table_as": {"unique_id": "macro.dbt.default__create_table_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "name": "default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9137652, "supported_languages": null}, "macro.dbt.materialization_view_default": {"unique_id": "macro.dbt.materialization_view_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "name": "materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.919893, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"unique_id": "macro.dbt.handle_existing_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.921067, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"unique_id": "macro.dbt.default__handle_existing_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "name": "default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.921632, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"unique_id": "macro.dbt.create_or_replace_view", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "name": "create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.925656, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"unique_id": "macro.dbt.get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.926919, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"unique_id": "macro.dbt.default__get_create_view_as_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.927346, "supported_languages": null}, "macro.dbt.create_view_as": {"unique_id": "macro.dbt.create_view_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9278772, "supported_languages": null}, "macro.dbt.default__create_view_as": {"unique_id": "macro.dbt.default__create_view_as", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "name": "default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.928556, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"unique_id": "macro.dbt.materialization_seed_default", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "name": "materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9364128, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"unique_id": "macro.dbt.create_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9457762, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"unique_id": "macro.dbt.default__create_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.948132, "supported_languages": null}, "macro.dbt.reset_csv_table": {"unique_id": "macro.dbt.reset_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.948717, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"unique_id": "macro.dbt.default__reset_csv_table", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9499142, "supported_languages": null}, "macro.dbt.get_csv_sql": {"unique_id": "macro.dbt.get_csv_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9503942, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"unique_id": "macro.dbt.default__get_csv_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.950737, "supported_languages": null}, "macro.dbt.get_binding_char": {"unique_id": "macro.dbt.get_binding_char", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9510899, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"unique_id": "macro.dbt.default__get_binding_char", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.951392, "supported_languages": null}, "macro.dbt.get_batch_size": {"unique_id": "macro.dbt.get_batch_size", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9517949, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"unique_id": "macro.dbt.default__get_batch_size", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9521081, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"unique_id": "macro.dbt.get_seed_column_quoted_csv", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9532878, "supported_languages": null}, "macro.dbt.load_csv_rows": {"unique_id": "macro.dbt.load_csv_rows", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9537542, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"unique_id": "macro.dbt.default__load_csv_rows", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "name": "default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.957227, "supported_languages": null}, "macro.dbt.generate_alias_name": {"unique_id": "macro.dbt.generate_alias_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.958654, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"unique_id": "macro.dbt.default__generate_alias_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "name": "default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.959223, "supported_languages": null}, "macro.dbt.generate_schema_name": {"unique_id": "macro.dbt.generate_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9607859, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"unique_id": "macro.dbt.default__generate_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.961487, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"unique_id": "macro.dbt.generate_schema_name_for_env", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "name": "generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.962187, "supported_languages": null}, "macro.dbt.generate_database_name": {"unique_id": "macro.dbt.generate_database_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.963528, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"unique_id": "macro.dbt.default__generate_database_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "name": "default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9641569, "supported_languages": null}, "macro.dbt.default__test_relationships": {"unique_id": "macro.dbt.default__test_relationships", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "name": "default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9653509, "supported_languages": null}, "macro.dbt.default__test_not_null": {"unique_id": "macro.dbt.default__test_not_null", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "name": "default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.966414, "supported_languages": null}, "macro.dbt.default__test_unique": {"unique_id": "macro.dbt.default__test_unique", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "name": "default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.967387, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"unique_id": "macro.dbt.default__test_accepted_values", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "name": "default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.969207, "supported_languages": null}, "macro.dbt.statement": {"unique_id": "macro.dbt.statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.973081, "supported_languages": null}, "macro.dbt.noop_statement": {"unique_id": "macro.dbt.noop_statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.974716, "supported_languages": null}, "macro.dbt.run_query": {"unique_id": "macro.dbt.run_query", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "name": "run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9754639, "supported_languages": null}, "macro.dbt.convert_datetime": {"unique_id": "macro.dbt.convert_datetime", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.979558, "supported_languages": null}, "macro.dbt.dates_in_range": {"unique_id": "macro.dbt.dates_in_range", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.982544, "supported_languages": null}, "macro.dbt.partition_range": {"unique_id": "macro.dbt.partition_range", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.984403, "supported_languages": null}, "macro.dbt.py_current_timestring": {"unique_id": "macro.dbt.py_current_timestring", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "name": "py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9849951, "supported_languages": null}, "macro.dbt.except": {"unique_id": "macro.dbt.except", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.98597, "supported_languages": null}, "macro.dbt.default__except": {"unique_id": "macro.dbt.default__except", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "name": "default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.986175, "supported_languages": null}, "macro.dbt.replace": {"unique_id": "macro.dbt.replace", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9873059, "supported_languages": null}, "macro.dbt.default__replace": {"unique_id": "macro.dbt.default__replace", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "name": "default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.987733, "supported_languages": null}, "macro.dbt.concat": {"unique_id": "macro.dbt.concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.98864, "supported_languages": null}, "macro.dbt.default__concat": {"unique_id": "macro.dbt.default__concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "name": "default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.988965, "supported_languages": null}, "macro.dbt.length": {"unique_id": "macro.dbt.length", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.990331, "supported_languages": null}, "macro.dbt.default__length": {"unique_id": "macro.dbt.default__length", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "name": "default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9906292, "supported_languages": null}, "macro.dbt.dateadd": {"unique_id": "macro.dbt.dateadd", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.991911, "supported_languages": null}, "macro.dbt.default__dateadd": {"unique_id": "macro.dbt.default__dateadd", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "name": "default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9923341, "supported_languages": null}, "macro.dbt.intersect": {"unique_id": "macro.dbt.intersect", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.993244, "supported_languages": null}, "macro.dbt.default__intersect": {"unique_id": "macro.dbt.default__intersect", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "name": "default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.993451, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"unique_id": "macro.dbt.escape_single_quotes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.994604, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"unique_id": "macro.dbt.default__escape_single_quotes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "name": "default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.995014, "supported_languages": null}, "macro.dbt.right": {"unique_id": "macro.dbt.right", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9960818, "supported_languages": null}, "macro.dbt.default__right": {"unique_id": "macro.dbt.default__right", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "name": "default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9965801, "supported_languages": null}, "macro.dbt.listagg": {"unique_id": "macro.dbt.listagg", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.9982479, "supported_languages": null}, "macro.dbt.default__listagg": {"unique_id": "macro.dbt.default__listagg", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "name": "default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324074.999271, "supported_languages": null}, "macro.dbt.datediff": {"unique_id": "macro.dbt.datediff", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.000368, "supported_languages": null}, "macro.dbt.default__datediff": {"unique_id": "macro.dbt.default__datediff", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "name": "default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0007942, "supported_languages": null}, "macro.dbt.safe_cast": {"unique_id": "macro.dbt.safe_cast", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0018618, "supported_languages": null}, "macro.dbt.default__safe_cast": {"unique_id": "macro.dbt.default__safe_cast", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "name": "default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.002232, "supported_languages": null}, "macro.dbt.hash": {"unique_id": "macro.dbt.hash", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.003175, "supported_languages": null}, "macro.dbt.default__hash": {"unique_id": "macro.dbt.default__hash", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "name": "default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0035892, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"unique_id": "macro.dbt.cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.004607, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"unique_id": "macro.dbt.default__cast_bool_to_text", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "name": "default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.005036, "supported_languages": null}, "macro.dbt.any_value": {"unique_id": "macro.dbt.any_value", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.00601, "supported_languages": null}, "macro.dbt.default__any_value": {"unique_id": "macro.dbt.default__any_value", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "name": "default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0063071, "supported_languages": null}, "macro.dbt.position": {"unique_id": "macro.dbt.position", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.007398, "supported_languages": null}, "macro.dbt.default__position": {"unique_id": "macro.dbt.default__position", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "name": "default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.007754, "supported_languages": null}, "macro.dbt.string_literal": {"unique_id": "macro.dbt.string_literal", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.008699, "supported_languages": null}, "macro.dbt.default__string_literal": {"unique_id": "macro.dbt.default__string_literal", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "name": "default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.008971, "supported_languages": null}, "macro.dbt.type_string": {"unique_id": "macro.dbt.type_string", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.011225, "supported_languages": null}, "macro.dbt.default__type_string": {"unique_id": "macro.dbt.default__type_string", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.011629, "supported_languages": null}, "macro.dbt.type_timestamp": {"unique_id": "macro.dbt.type_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.012175, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"unique_id": "macro.dbt.default__type_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0125592, "supported_languages": null}, "macro.dbt.type_float": {"unique_id": "macro.dbt.type_float", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.012963, "supported_languages": null}, "macro.dbt.default__type_float": {"unique_id": "macro.dbt.default__type_float", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.013345, "supported_languages": null}, "macro.dbt.type_numeric": {"unique_id": "macro.dbt.type_numeric", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.013751, "supported_languages": null}, "macro.dbt.default__type_numeric": {"unique_id": "macro.dbt.default__type_numeric", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0141928, "supported_languages": null}, "macro.dbt.type_bigint": {"unique_id": "macro.dbt.type_bigint", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.014601, "supported_languages": null}, "macro.dbt.default__type_bigint": {"unique_id": "macro.dbt.default__type_bigint", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.014984, "supported_languages": null}, "macro.dbt.type_int": {"unique_id": "macro.dbt.type_int", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.015394, "supported_languages": null}, "macro.dbt.default__type_int": {"unique_id": "macro.dbt.default__type_int", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.015761, "supported_languages": null}, "macro.dbt.type_boolean": {"unique_id": "macro.dbt.type_boolean", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.016257, "supported_languages": null}, "macro.dbt.default__type_boolean": {"unique_id": "macro.dbt.default__type_boolean", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "name": "default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.01665, "supported_languages": null}, "macro.dbt.array_concat": {"unique_id": "macro.dbt.array_concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "name": "array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.017704, "supported_languages": null}, "macro.dbt.default__array_concat": {"unique_id": "macro.dbt.default__array_concat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "name": "default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0180538, "supported_languages": null}, "macro.dbt.bool_or": {"unique_id": "macro.dbt.bool_or", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.01898, "supported_languages": null}, "macro.dbt.default__bool_or": {"unique_id": "macro.dbt.default__bool_or", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "name": "default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.019255, "supported_languages": null}, "macro.dbt.last_day": {"unique_id": "macro.dbt.last_day", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.020355, "supported_languages": null}, "macro.dbt.default_last_day": {"unique_id": "macro.dbt.default_last_day", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0210161, "supported_languages": null}, "macro.dbt.default__last_day": {"unique_id": "macro.dbt.default__last_day", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "name": "default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.021465, "supported_languages": null}, "macro.dbt.split_part": {"unique_id": "macro.dbt.split_part", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.023027, "supported_languages": null}, "macro.dbt.default__split_part": {"unique_id": "macro.dbt.default__split_part", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.023608, "supported_languages": null}, "macro.dbt._split_part_negative": {"unique_id": "macro.dbt._split_part_negative", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "name": "_split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.024339, "supported_languages": null}, "macro.dbt.date_trunc": {"unique_id": "macro.dbt.date_trunc", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.02547, "supported_languages": null}, "macro.dbt.default__date_trunc": {"unique_id": "macro.dbt.default__date_trunc", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "name": "default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0258088, "supported_languages": null}, "macro.dbt.array_construct": {"unique_id": "macro.dbt.array_construct", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "name": "array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0271108, "supported_languages": null}, "macro.dbt.default__array_construct": {"unique_id": "macro.dbt.default__array_construct", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "name": "default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.027744, "supported_languages": null}, "macro.dbt.array_append": {"unique_id": "macro.dbt.array_append", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "name": "array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.028975, "supported_languages": null}, "macro.dbt.default__array_append": {"unique_id": "macro.dbt.default__array_append", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "name": "default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0293288, "supported_languages": null}, "macro.dbt.create_schema": {"unique_id": "macro.dbt.create_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.030483, "supported_languages": null}, "macro.dbt.default__create_schema": {"unique_id": "macro.dbt.default__create_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0309591, "supported_languages": null}, "macro.dbt.drop_schema": {"unique_id": "macro.dbt.drop_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.031368, "supported_languages": null}, "macro.dbt.default__drop_schema": {"unique_id": "macro.dbt.default__drop_schema", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "name": "default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0318341, "supported_languages": null}, "macro.dbt.current_timestamp": {"unique_id": "macro.dbt.current_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.033173, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"unique_id": "macro.dbt.default__current_timestamp", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.033555, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"unique_id": "macro.dbt.snapshot_get_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0339258, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"unique_id": "macro.dbt.default__snapshot_get_time", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.034244, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"unique_id": "macro.dbt.current_timestamp_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.034715, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"unique_id": "macro.dbt.default__current_timestamp_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0349262, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.035347, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "name": "default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.035771, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"unique_id": "macro.dbt.get_create_index_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0371048, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"unique_id": "macro.dbt.default__get_create_index_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.037695, "supported_languages": null}, "macro.dbt.create_indexes": {"unique_id": "macro.dbt.create_indexes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.038136, "supported_languages": null}, "macro.dbt.default__create_indexes": {"unique_id": "macro.dbt.default__create_indexes", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "name": "default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.039148, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"unique_id": "macro.dbt.make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0456681, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"unique_id": "macro.dbt.default__make_intermediate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.046104, "supported_languages": null}, "macro.dbt.make_temp_relation": {"unique_id": "macro.dbt.make_temp_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0466611, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"unique_id": "macro.dbt.default__make_temp_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.047403, "supported_languages": null}, "macro.dbt.make_backup_relation": {"unique_id": "macro.dbt.make_backup_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0480168, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"unique_id": "macro.dbt.default__make_backup_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.048809, "supported_languages": null}, "macro.dbt.drop_relation": {"unique_id": "macro.dbt.drop_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.049268, "supported_languages": null}, "macro.dbt.default__drop_relation": {"unique_id": "macro.dbt.default__drop_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.049806, "supported_languages": null}, "macro.dbt.truncate_relation": {"unique_id": "macro.dbt.truncate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.050257, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"unique_id": "macro.dbt.default__truncate_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.050667, "supported_languages": null}, "macro.dbt.rename_relation": {"unique_id": "macro.dbt.rename_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.051177, "supported_languages": null}, "macro.dbt.default__rename_relation": {"unique_id": "macro.dbt.default__rename_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.051893, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"unique_id": "macro.dbt.get_or_create_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.052501, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"unique_id": "macro.dbt.default__get_or_create_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.053866, "supported_languages": null}, "macro.dbt.load_cached_relation": {"unique_id": "macro.dbt.load_cached_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.054573, "supported_languages": null}, "macro.dbt.load_relation": {"unique_id": "macro.dbt.load_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0549362, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"unique_id": "macro.dbt.drop_relation_if_exists", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "name": "drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.055425, "supported_languages": null}, "macro.dbt.collect_freshness": {"unique_id": "macro.dbt.collect_freshness", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0570502, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"unique_id": "macro.dbt.default__collect_freshness", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "name": "default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0581172, "supported_languages": null}, "macro.dbt.copy_grants": {"unique_id": "macro.dbt.copy_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.06205, "supported_languages": null}, "macro.dbt.default__copy_grants": {"unique_id": "macro.dbt.default__copy_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.062446, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.062894, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.063191, "supported_languages": null}, "macro.dbt.should_revoke": {"unique_id": "macro.dbt.should_revoke", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.064117, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"unique_id": "macro.dbt.get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0646179, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"unique_id": "macro.dbt.default__get_show_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0649018, "supported_languages": null}, "macro.dbt.get_grant_sql": {"unique_id": "macro.dbt.get_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.065465, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"unique_id": "macro.dbt.default__get_grant_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0659552, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"unique_id": "macro.dbt.get_revoke_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.066529, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"unique_id": "macro.dbt.default__get_revoke_sql", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0670002, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"unique_id": "macro.dbt.get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0675662, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"unique_id": "macro.dbt.default__get_dcl_statement_list", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.069387, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"unique_id": "macro.dbt.call_dcl_statements", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.070018, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"unique_id": "macro.dbt.default__call_dcl_statements", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.070652, "supported_languages": null}, "macro.dbt.apply_grants": {"unique_id": "macro.dbt.apply_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.071306, "supported_languages": null}, "macro.dbt.default__apply_grants": {"unique_id": "macro.dbt.default__apply_grants", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "name": "default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.074133, "supported_languages": null}, "macro.dbt.alter_column_comment": {"unique_id": "macro.dbt.alter_column_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.07601, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"unique_id": "macro.dbt.default__alter_column_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.076446, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"unique_id": "macro.dbt.alter_relation_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0769558, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"unique_id": "macro.dbt.default__alter_relation_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.077386, "supported_languages": null}, "macro.dbt.persist_docs": {"unique_id": "macro.dbt.persist_docs", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.078047, "supported_languages": null}, "macro.dbt.default__persist_docs": {"unique_id": "macro.dbt.default__persist_docs", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "name": "default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0792341, "supported_languages": null}, "macro.dbt.get_catalog": {"unique_id": "macro.dbt.get_catalog", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0824351, "supported_languages": null}, "macro.dbt.default__get_catalog": {"unique_id": "macro.dbt.default__get_catalog", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0830872, "supported_languages": null}, "macro.dbt.information_schema_name": {"unique_id": "macro.dbt.information_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.083549, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"unique_id": "macro.dbt.default__information_schema_name", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.083961, "supported_languages": null}, "macro.dbt.list_schemas": {"unique_id": "macro.dbt.list_schemas", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0844128, "supported_languages": null}, "macro.dbt.default__list_schemas": {"unique_id": "macro.dbt.default__list_schemas", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.085026, "supported_languages": null}, "macro.dbt.check_schema_exists": {"unique_id": "macro.dbt.check_schema_exists", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0855522, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"unique_id": "macro.dbt.default__check_schema_exists", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0863092, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"unique_id": "macro.dbt.list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.086777, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"unique_id": "macro.dbt.default__list_relations_without_caching", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "name": "default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.087187, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"unique_id": "macro.dbt.get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.09098, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"unique_id": "macro.dbt.default__get_columns_in_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0918489, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"unique_id": "macro.dbt.sql_convert_columns_in_relation", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.092635, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"unique_id": "macro.dbt.get_columns_in_query", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0931032, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"unique_id": "macro.dbt.default__get_columns_in_query", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.09394, "supported_languages": null}, "macro.dbt.alter_column_type": {"unique_id": "macro.dbt.alter_column_type", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.094506, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"unique_id": "macro.dbt.default__alter_column_type", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.0959759, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"unique_id": "macro.dbt.alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.096619, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "name": "default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.098658, "supported_languages": null}, "macro.dbt.build_ref_function": {"unique_id": "macro.dbt.build_ref_function", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join(\".\"): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.10208, "supported_languages": null}, "macro.dbt.build_source_function": {"unique_id": "macro.dbt.build_source_function", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join(\".\"): resolved.quote(database=False, schema=False, identifier=False) | string}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.103234, "supported_languages": null}, "macro.dbt.build_config_dict": {"unique_id": "macro.dbt.build_config_dict", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {%- for key in model.config.config_keys_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == 'language' -%}\n {%- set value = 'python' -%}\n {%- endif -%}\n {%- set value = model.config[key] -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.104405, "supported_languages": null}, "macro.dbt.py_script_postfix": {"unique_id": "macro.dbt.py_script_postfix", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = '{{ this.database }}'\n schema = '{{ this.schema }}'\n identifier = '{{ this.identifier }}'\n def __repr__(self):\n return '{{ this }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.105411, "supported_languages": null}, "macro.dbt.py_script_comment": {"unique_id": "macro.dbt.py_script_comment", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "name": "py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "resource_type": "macro", "tags": [], "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.105611, "supported_languages": null}, "macro.dbt.test_unique": {"unique_id": "macro.dbt.test_unique", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.107241, "supported_languages": null}, "macro.dbt.test_not_null": {"unique_id": "macro.dbt.test_not_null", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.10783, "supported_languages": null}, "macro.dbt.test_accepted_values": {"unique_id": "macro.dbt.test_accepted_values", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.108544, "supported_languages": null}, "macro.dbt.test_relationships": {"unique_id": "macro.dbt.test_relationships", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "name": "test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "resource_type": "macro", "tags": [], "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676324075.109239, "supported_languages": null}}, "docs": {"test.somedoc": {"unique_id": "test.somedoc", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "name": "somedoc", "block_contents": "Testing, testing"}, "dbt.__overview__": {"unique_id": "dbt.__overview__", "package_name": "dbt", "root_path": "/Users/gerda/FTA/dbt/core/dbt/include/global_project", "path": "overview.md", "original_file_path": "docs/overview.md", "name": "__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"fqn": ["test", "simple_exposure"], "unique_id": "exposure.test.simple_exposure", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "simple_exposure", "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "resource_type": "exposure", "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [["my_model"]], "sources": [["my_source", "my_table"]], "created_at": 1676324075.609121}}, "metrics": {"metric.test.my_metric": {"fqn": ["test", "my_metric"], "unique_id": "metric.test.my_metric", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "my_metric", "description": "", "label": "Count records", "calculation_method": "count", "timestamp": "updated_at", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "window": null, "model": "ref('my_model')", "model_unique_id": null, "resource_type": "metric", "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "metrics": [], "created_at": 1676324075.618992}}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"resource_type": "model", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "disabled_model"], "unique_id": "model.test.disabled_model", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "name": "disabled_model", "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "34f7b8e60d9e7933469c48d6c92b0a53918d0ba626a9ce2c30ab2f1532145827"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.4071748, "config_call_dict": {"enabled": false}}], "snapshot.test.disabled_snapshot_seed": [{"resource_type": "snapshot", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763240740000063267_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "unique_id": "snapshot.test.disabled_snapshot_seed", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "name": "disabled_snapshot_seed", "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "52b08465e16dcbc364162dfbdb34cf25e04295bc13d63ab0b420f60d15234c76"}, "tags": [], "refs": [["my_seed"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763240740000063267_test_previous_version_state", "enabled": false}, "created_at": 1676324075.4334059, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763240740000063267_test_previous_version_state", "enabled": false}}], "analysis.test.disabled_al": [{"resource_type": "analysis", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "analysis", "disabled_al"], "unique_id": "analysis.test.disabled_al", "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "name": "disabled_al", "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "76b8579816eac97721616fd429dcd1a93c311c6358830a65d40ebe5661572610"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.4483929, "config_call_dict": {"enabled": false}}], "test.test.disabled_just_my": [{"resource_type": "test", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "disabled_just_my"], "unique_id": "test.test.disabled_just_my", "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "name": "disabled_just_my", "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "0b5827d08d1e3c97e8fb865bea00031b2e90ecef7884a42429cc48d0f48b8c20"}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.5147672, "config_call_dict": {"enabled": false}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "resource_type": "test", "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state_dbt_test__audit", "fqn": ["test", "disabled_check_nothing_my_model_"], "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "name": "disabled_check_nothing_my_model_", "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "tags": [], "refs": [["my_model"]], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.587028, "config_call_dict": {"enabled": false}, "column_name": null, "file_key_name": "models.my_model"}], "exposure.test.disabled_exposure": [{"fqn": ["test", "disabled_exposure"], "unique_id": "exposure.test.disabled_exposure", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "disabled_exposure", "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "resource_type": "exposure", "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "sources": [], "created_at": 1676324075.612152}], "metric.test.disabled_metric": [{"fqn": ["test", "disabled_metric"], "unique_id": "metric.test.disabled_metric", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "schema.yml", "original_file_path": "models/schema.yml", "name": "disabled_metric", "description": "", "label": "Count records", "calculation_method": "count", "timestamp": "updated_at", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "window": null, "model": "ref('my_model')", "model_unique_id": null, "resource_type": "metric", "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "metrics": [], "created_at": 1676324075.622605}], "seed.test.disabled_seed": [{"resource_type": "seed", "depends_on": {"macros": [], "nodes": []}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "database": "dbt", "schema": "test16763240740000063267_test_previous_version_state", "fqn": ["test", "disabled_seed"], "unique_id": "seed.test.disabled_seed", "raw_code": "", "language": "sql", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "name": "disabled_seed", "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "c6c08a913b5a382014ef0ba248d97b12fc801beb369fdbd24aff1a3912ee3773"}, "tags": [], "refs": [], "sources": [], "metrics": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "compiled_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676324075.594186, "config_call_dict": {}}], "source.test.my_source.disabled_table": [{"fqn": ["test", "my_source", "disabled_table"], "database": "dbt", "schema": "my_source", "unique_id": "source.test.my_source.disabled_table", "package_name": "test", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-126/project0", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "name": "disabled_table", "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "resource_type": "source", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1676324075.625102}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "metric.test.my_metric", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v8/manifest.json b/tests/functional/artifacts/data/state/v8/manifest.json deleted file mode 100644 index df5c8738e..000000000 --- a/tests/functional/artifacts/data/state/v8/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v8.json", "dbt_version": "1.5.0a1", "generated_at": "2023-02-13T21:04:43.788883Z", "invocation_id": "c7896040-31e1-487d-8438-19d703edb137", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "2b9123e04ab8bb798f7c565afdc3ee0e56fcd66b4bfbdb435b4891c878d947c5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "quote": null, "tags": []}}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.4291918, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "00c13c306831070996970605fbc4c901aa456e1ed1c028725a932e4e6a4ffb0a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763222812618906995_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763222812618906995_test_previous_version_state"}, "created_at": 1676322282.28191, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [["my_seed"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null}, "analysis.test.a": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "bd1ee600e4e80d03f488fee52a66e8d51b5be2b98acc20df1cf8be4670d86ae5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.338664, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}, "test.test.just_my": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "f30b7a814e0e3761d1a8042aa40d658d6c33affb28cd92782b0f56559c414fd8"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1676322282.365304, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null}, "seed.test.my_seed": {"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "22697c9b76d73a6c7561554ddb2ce101428ea2737ba8dc500d52ebcfdcfcfc13"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.395373, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-115/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.439473, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "column_name": "id", "file_key_name": "models.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1676322282.4446359, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "column_name": null, "file_key_name": "models.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1676322282.498101}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4094772, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.410033, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.41051, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.411176, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.411718, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.412009, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.41232, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.412619, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4152992, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4168088, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.428651, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.430589, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4315221, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.432323, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.433569, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.434568, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4350138, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.435891, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.436857, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4389682, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.439469, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4407659, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.441967, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.443386, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4441102, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.446302, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4471622, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.447847, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.449656, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.452299, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.453088, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.455125, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.462395, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.463126, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.464517, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4655108, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.468192, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.468693, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.469058, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4694211, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.469785, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.470596, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.471361, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.472131, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.473068, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4737349, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.481837, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4828649, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4835358, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4857202, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4861922, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.486644, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.490088, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.4935129, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5012, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.501941, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.502363, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5025961, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.502961, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5032582, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.50385, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5059588, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5064478, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5070798, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.508459, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.523073, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.528616, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.52976, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.531104, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5320342, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.533005, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.536449, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.537867, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.539545, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5401359, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5423229, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.554441, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.558579, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.559229, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.561732, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.56239, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.564075, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.565938, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.567849, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.568505, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5689778, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.569709, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.570188, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5709162, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.57139, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.572049, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.572522, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.5729191, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.57362, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.585784, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.59618, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.60002, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.603049, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.605203, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.611255, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.612431, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6129012, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.613973, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.614995, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.621283, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6220968, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.622665, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.626231, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.627094, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.627518, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.627975, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.628621, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6366222, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6457422, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.648115, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.648722, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.650075, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.65061, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6509619, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6513228, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6516201, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.652027, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6523268, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6536052, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.654092, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.657616, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.658516, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.659103, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.660212, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.660863, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.661577, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.662461, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6631, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.664021, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.664862, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.665466, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.666805, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.670157, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.671575, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.672309, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.675864, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.678925, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.680963, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.681591, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.682165, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.682366, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6831412, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.683658, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.684277, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6846101, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.685228, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.685508, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6863098, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6870232, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.687619, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6878238, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.688459, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.688828, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.689515, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.689863, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.691304, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6924748, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.693249, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.69366, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.694339, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6946921, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.695292, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.695692, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.696287, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.696754, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.697381, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6978078, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.698524, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.6990662, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.69984, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.700127, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.702124, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.702521, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.702935, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.703326, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.703733, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.704247, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7046552, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.705098, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.705503, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.705885, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7063448, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7067552, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7071831, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.707561, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.708241, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7085838, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.709186, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7094588, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7102468, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.710925, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.711309, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.712547, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.71298, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.713571, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.714258, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.714612, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7155101, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.716278, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7171369, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.717506, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.718344, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.71883, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7192378, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.719709, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.720729, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7211258, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.721498, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.721788, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.722215, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7224221, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7228422, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.723277, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.724287, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7246542, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.725062, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.726084, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.732279, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.732851, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7335029, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.734247, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7348611, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7356532, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7361112, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7366579, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7371142, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.737532, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.738038, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7387478, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7393658, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.740736, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.741308, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.741668, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7421598, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.743146, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7441761, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7475011, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7478158, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.748252, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7485409, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.749697, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.750331, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7506151, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.751184, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7516642, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.752241, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.752723, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7532978, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7553658, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.756046, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7566988, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7572742, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.760213, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.761635, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.762066, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.762573, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.762995, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7636638, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.76503, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.767971, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7686348, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.769115, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.769671, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.770122, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.770835, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.771361, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.772129, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.772596, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7730088, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.776206, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.776611, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.777584, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.77807, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.77891, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.77949, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7810528, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.781748, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7839968, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join(\".\"): resolved | string | replace('\"', '\\\"')}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.787274, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join(\".\"): resolved | string | replace('\"', '\\\"')}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = \".\".join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.788432, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.790293, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = this | string | replace('\"', '\\\\\"') %}\n def __repr__(self):\n return \"{{ this_relation_name }}\"\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.791504, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.791784, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7929192, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.793498, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.794215, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1676322281.7948952, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [["my_model"]], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1676322282.478955}}, "metrics": {"metric.test.my_metric": {"name": "my_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.my_metric", "fqn": ["test", "my_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [["my_model"]], "metrics": [], "created_at": 1676322282.491698}}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "34f7b8e60d9e7933469c48d6c92b0a53918d0ba626a9ce2c30ab2f1532145827"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.224511, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "52b08465e16dcbc364162dfbdb34cf25e04295bc13d63ab0b420f60d15234c76"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "strategy": "check", "target_schema": "test16763222812618906995_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763222812618906995_test_previous_version_state", "enabled": false}, "created_at": 1676322282.303265, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16763222812618906995_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [["my_seed"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "76b8579816eac97721616fd429dcd1a93c311c6358830a65d40ebe5661572610"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.3320582, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "0b5827d08d1e3c97e8fb865bea00031b2e90ecef7884a42429cc48d0f48b8c20"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.359573, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16763222812618906995_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.455549, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [["my_model"]], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "column_name": null, "file_key_name": "models.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "sources": [], "metrics": [], "created_at": 1676322282.482795}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [["my_model"]], "metrics": [], "created_at": 1676322282.495338}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16763222812618906995_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "c6c08a913b5a382014ef0ba248d97b12fc801beb369fdbd24aff1a3912ee3773"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1676322282.462719, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16763222812618906995_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/qt/vw8wqdgx4w381wh14b9y25m40000gn/T/pytest-of-gerda/pytest-115/project0", "depends_on": {"macros": []}}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1676322282.498409}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "metric.test.my_metric", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}} diff --git a/tests/functional/artifacts/data/state/v9/manifest.json b/tests/functional/artifacts/data/state/v9/manifest.json deleted file mode 100644 index febb34712..000000000 --- a/tests/functional/artifacts/data/state/v9/manifest.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v9.json", "dbt_version": "1.5.0b5", "generated_at": "2023-04-10T02:53:50.434615Z", "invocation_id": "7e6390ca-c227-4a45-b9e0-85eeb260e9a8", "env": {}, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.898038, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "version": null, "is_latest_version": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16810952296205305560_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16810952296205305560_test_previous_version_state"}, "created_at": 1681095229.843765, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.8655732, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1681095229.884334, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.889285, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/k6/gtt07v8j2vn51m_z05xk_fjc0000gp/T/pytest-of-michelleark/pytest-80/project5", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.898516, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1681095229.900049, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1681095229.938866}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549095, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549314, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549501, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5497909, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.549994, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5501041, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5502121, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5503209, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.551399, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.552023, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_columns_spec_ddl() }} ;\n insert into {{ relation }} {{ get_column_names() }}\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_columns_spec_ddl", "macro.dbt_postgres.get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5590951, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.559607, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5599282, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.560247, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5607271, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5611079, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.561283, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.561631, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.56203, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.562905, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5631082, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.563437, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.563714, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.564138, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.564367, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5649762, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.565191, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.565307, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.565693, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5664032, "supported_languages": null}, "macro.dbt_postgres.get_column_names": {"name": "get_column_names", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/columns_spec_ddl.sql", "original_file_path": "macros/utils/columns_spec_ddl.sql", "unique_id": "macro.dbt_postgres.get_column_names", "macro_sql": "{% macro get_column_names() %}\n {# loop through user_provided_columns to get column names #}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns %}\n {% set col = user_provided_columns[i] %}\n {{ col['name'] }} {{ \",\" if not loop.last }}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.566866, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.567093, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5678, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.571049, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5712, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.571703, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.572116, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.573218, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.573418, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.57356, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5737019, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.573847, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5742688, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5745878, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.574906, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.575354, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5756302, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.579268, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.579456, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.579693, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5804448, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5806148, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5807948, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n select {{ check_cols_config | join(', ') }} from ({{ node['compiled_code'] }}) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5822341, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.583667, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.587839, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.58813, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5883, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.588392, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5885382, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.588657, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.588872, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5897758, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.589972, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.590228, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5906692, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5969589, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.599081, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.5995462, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.599873, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.600277, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.600673, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6024802, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.603078, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.603765, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6040099, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.604749, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.61131, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.613036, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6133099, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.614329, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6146078, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.615274, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6159291, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.616825, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.617065, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6172569, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.617557, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6177459, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618047, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618237, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618508, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6187038, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.618857, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.619138, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.624626, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.630311, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.631569, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6327949, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.633666, "supported_languages": null}, "macro.dbt.get_columns_spec_ddl": {"name": "get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_columns_spec_ddl", "macro_sql": "{%- macro get_columns_spec_ddl() -%}\n {{ adapter.dispatch('get_columns_spec_ddl', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.634799, "supported_languages": null}, "macro.dbt.default__get_columns_spec_ddl": {"name": "default__get_columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_columns_spec_ddl", "macro_sql": "{% macro default__get_columns_spec_ddl() -%}\n {{ return(columns_spec_ddl()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.columns_spec_ddl"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.634922, "supported_languages": null}, "macro.dbt.columns_spec_ddl": {"name": "columns_spec_ddl", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.columns_spec_ddl", "macro_sql": "{% macro columns_spec_ddl() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set user_provided_columns = model['columns'] -%}\n (\n {% for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set constraints = col['constraints'] -%}\n {{ col['name'] }} {{ col['data_type'] }}{% for c in constraints %} {{ adapter.render_raw_column_constraint(c) }}{% endfor %}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6354618, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6356301, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.635766, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(model['columns'])) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set string_sql_columns = stringify_formatted_columns(sql_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n {%- set string_yaml_columns = stringify_formatted_columns(yaml_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(string_yaml_columns, string_sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(string_yaml_columns, string_sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(string_yaml_columns, string_sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns", "macro.dbt.stringify_formatted_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.636966, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append({'name': column.name, 'formatted': formatted_column}) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.637417, "supported_languages": null}, "macro.dbt.stringify_formatted_columns": {"name": "stringify_formatted_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.stringify_formatted_columns", "macro_sql": "{% macro stringify_formatted_columns(formatted_columns) %}\n {% set column_strings = [] %}\n {% for column in formatted_columns %}\n {% do column_strings.append(column['formatted']) %}\n {% endfor %}\n {{ return(column_strings|join(', ')) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6377542, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {{ return(column.column.lower() ~ \" \" ~ column.dtype) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6379352, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.640608, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.641506, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.641694, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.642134, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_columns_spec_ddl() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_columns_spec_ddl", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.642807, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.643002, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select\n {% for column in model['columns'] %}\n {{ column }}{{ \", \" if not loop.last }}\n {% endfor %}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.643258, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.645857, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.64616, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6463842, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=True) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6479082, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.648345, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.648515, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6487029, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6491442, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparision later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.652572, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.657813, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.65873, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.658967, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659456, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659651, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659791, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.659935, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6600509, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6602108, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6603289, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.660815, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6610072, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.66231, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.662733, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ node.version) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6630669, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6635892, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.663862, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.664153, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6645498, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.664808, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6651552, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.66545, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.665698, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6662662, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.667764, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6684191, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.668716, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.670603, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.671814, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6726348, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.672904, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6731641, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.673249, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.673606, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.673785, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.674051, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6741881, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.674554, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.674687, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.675072, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6752498, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.675481, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.675561, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6758258, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6759732, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.676275, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6765099, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6771588, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.677581, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.677927, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.678091, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6783779, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6785228, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6787798, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6789498, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.679196, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6793652, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6796181, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6797252, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.680025, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6801689, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.680419, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6805272, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.681462, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.681621, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6818528, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6820118, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6821811, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.682334, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6826391, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.682859, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683039, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683199, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683383, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683554, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683734, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.683891, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.684203, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.684342, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6845891, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.684702, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.685059, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.685353, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.685507, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6860402, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.686302, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.686583, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6868732, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687006, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687397, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687667, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.687964, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688101, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688492, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688687, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.688858, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.689048, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6895611, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.689718, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.689871, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6899788, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.690153, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.690235, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6904068, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.690679, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.691232, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6913862, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.69156, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.692, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.695603, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6958349, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6960871, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.696411, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6966882, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.697024, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6972108, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.697429, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6976268, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.697804, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.69802, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6983092, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.698685, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.6992798, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.699531, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.699724, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.699935, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.700392, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7008178, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7027688, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.702912, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7031, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.703357, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7037492, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7039511, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7040598, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.704402, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.704654, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.704913, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.705131, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.705385, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7062578, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.706471, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.706742, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.707036, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.708344, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7090821, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.709265, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7094889, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7096682, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.709949, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7104452, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.711974, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.712246, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.712435, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7125928, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.712777, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7130291, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7132502, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.713568, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.713761, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.713928, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.716088, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7163389, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.716653, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.71684, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql) %}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.71695, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.717133, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif -%}\n cast(null as {{ col['data_type'] }}) as {{ col['name'] }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.717796, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.718113, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7183008, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7186701, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.718902, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.719517, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7197812, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.720577, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7219672, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.722124, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {%- set resolved = ref(*_ref) -%}\n {%- do ref_dict.update({_ref | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args,dbt_load_df_function):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.722541, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.722951, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.723784, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args: ref(*args, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.724266, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.724343, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.724848, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.7250812, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.725373, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1681095229.725664, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1681095229.907179}}, "metrics": {"metric.test.my_metric": {"name": "my_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.my_metric", "fqn": ["test", "my_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "metrics": [], "created_at": 1681095229.936167, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.840684, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "version": null, "is_latest_version": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16810952296205305560_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16810952296205305560_test_previous_version_state", "enabled": false}, "created_at": 1681095229.846447, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16810952296205305560_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.863457, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.882241, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16810952296205305560_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.9022238, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1681095229.9083421}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "calculation_method": "count", "expression": "*", "filters": [], "time_grains": ["day"], "dimensions": [], "timestamp": "updated_at", "window": null, "model": "ref('my_model')", "model_unique_id": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "metrics": [], "created_at": 1681095229.937823, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16810952296205305560_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1681095229.905121, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16810952296205305560_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/k6/gtt07v8j2vn51m_z05xk_fjc0000gp/T/pytest-of-michelleark/pytest-80/project5", "depends_on": {"macros": []}}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1681095229.939002}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "metric.test.my_metric", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}, "group_map": {}} diff --git a/tests/functional/artifacts/expected_manifest.py b/tests/functional/artifacts/expected_manifest.py deleted file mode 100644 index 90f480c8a..000000000 --- a/tests/functional/artifacts/expected_manifest.py +++ /dev/null @@ -1,1938 +0,0 @@ -import hashlib -import os -from unittest.mock import ANY - -import dbt -from dbt.tests.util import AnyStringWith - - -# This produces an "expected manifest", with a number of the fields -# modified to avoid ephemeral changes. -# ANY -# AnyStringWith -# LineIndifferent -# It also uses some convenience methods to generate the -# various config dictionaries. - - -def get_rendered_model_config(**updates): - result = { - "database": None, - "schema": None, - "alias": None, - "enabled": True, - "group": None, - "materialized": "view", - "pre-hook": [], - "post-hook": [], - "column_types": {}, - "quoting": {}, - "tags": [], - "persist_docs": {}, - "full_refresh": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "meta": {}, - "unique_key": None, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - "access": "protected", - } - result.update(updates) - return result - - -def get_unrendered_model_config(**updates): - return updates - - -def get_rendered_seed_config(**updates): - result = { - "enabled": True, - "group": None, - "materialized": "seed", - "persist_docs": {}, - "pre-hook": [], - "post-hook": [], - "column_types": {}, - "delimiter": ",", - "quoting": {}, - "tags": [], - "quote_columns": True, - "full_refresh": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "database": None, - "schema": None, - "alias": None, - "meta": {}, - "unique_key": None, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - } - result.update(updates) - return result - - -def get_unrendered_seed_config(**updates): - result = {"quote_columns": True} - result.update(updates) - return result - - -def get_rendered_snapshot_config(**updates): - result = { - "database": None, - "schema": None, - "alias": None, - "enabled": True, - "group": None, - "materialized": "snapshot", - "pre-hook": [], - "post-hook": [], - "column_types": {}, - "quoting": {}, - "tags": [], - "persist_docs": {}, - "full_refresh": None, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "strategy": "check", - "check_cols": "all", - "unique_key": "id", - "target_database": None, - "target_schema": None, - "updated_at": None, - "meta": {}, - "grants": {}, - "packages": [], - "incremental_strategy": None, - "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False, "alias_types": True}, - } - result.update(updates) - return result - - -def get_unrendered_snapshot_config(**updates): - result = {"check_cols": "all", "strategy": "check", "target_schema": None, "unique_key": "id"} - result.update(updates) - return result - - -def get_rendered_tst_config(**updates): - result = { - "enabled": True, - "group": None, - "materialized": "test", - "tags": [], - "severity": "ERROR", - "store_failures": None, - "store_failures_as": None, - "warn_if": "!= 0", - "error_if": "!= 0", - "fail_calc": "count(*)", - "where": None, - "limit": None, - "database": None, - "schema": "dbt_test__audit", - "alias": None, - "meta": {}, - } - result.update(updates) - return result - - -def get_unrendered_tst_config(**updates): - result = {} - result.update(updates) - return result - - -def quote(value): - quote_char = '"' - return "{0}{1}{0}".format(quote_char, value) - - -def relation_name_format(quote_database: bool, quote_schema: bool, quote_identifier: bool): - return ".".join( - ( - quote("{0}") if quote_database else "{0}", - quote("{1}") if quote_schema else "{1}", - quote("{2}") if quote_identifier else "{2}", - ) - ) - - -def checksum_file(path): - """windows has silly git behavior that adds newlines, and python does - silly things if we just open(..., 'r').encode('utf-8'). - """ - with open(path, "rb") as fp: - # We strip the file contents because we want the checksum to match the stored contents - hashed = hashlib.sha256(fp.read().strip()).hexdigest() - return { - "name": "sha256", - "checksum": hashed, - } - - -def read_file_replace_returns(path): - with open(path, "r") as fp: - return fp.read().replace("\r", "").replace("\n", "") - - -class LineIndifferent: - def __init__(self, expected): - self.expected = expected.replace("\r", "") - - def __eq__(self, other): - got = other.replace("\r", "").replace("\n", "") - return self.expected == got - - def __repr__(self): - return "LineIndifferent({!r})".format(self.expected) - - def __str__(self): - return self.__repr__() - - -def expected_seeded_manifest(project, model_database=None, quote_model=False): - model_sql_path = os.path.join("models", "model.sql") - second_model_sql_path = os.path.join("models", "second_model.sql") - model_schema_yml_path = os.path.join("models", "schema.yml") - seed_schema_yml_path = os.path.join("seeds", "schema.yml") - seed_path = os.path.join("seeds", "seed.csv") - snapshot_path = os.path.join("snapshots", "snapshot_seed.sql") - - my_schema_name = project.test_schema - alternate_schema = project.test_schema + "_test" - test_audit_schema = my_schema_name + "_dbt_test__audit" - - model_database = project.database - - model_config = get_rendered_model_config(docs={"node_color": None, "show": False}) - second_config = get_rendered_model_config( - schema="test", docs={"node_color": None, "show": False} - ) - - unrendered_model_config = get_unrendered_model_config( - materialized="view", docs={"show": False} - ) - - unrendered_second_config = get_unrendered_model_config( - schema="test", materialized="view", docs={"show": False} - ) - - seed_config = get_rendered_seed_config() - unrendered_seed_config = get_unrendered_seed_config() - - test_config = get_rendered_tst_config() - unrendered_test_config = get_unrendered_tst_config() - - snapshot_config = get_rendered_snapshot_config(target_schema=alternate_schema) - unrendered_snapshot_config = get_unrendered_snapshot_config(target_schema=alternate_schema) - - quote_database = quote_schema = True - relation_name_node_format = relation_name_format(quote_database, quote_schema, quote_model) - relation_name_source_format = relation_name_format( - quote_database, quote_schema, quote_identifier=True - ) - - compiled_model_path = os.path.join("target", "compiled", "test", "models") - - model_raw_code = read_file_replace_returns(model_sql_path).rstrip("\r\n") - - return { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", - "dbt_version": dbt.version.__version__, - "nodes": { - "model.test.model": { - "compiled_path": os.path.join(compiled_model_path, "model.sql"), - "build_path": None, - "created_at": ANY, - "name": "model", - "relation_name": relation_name_node_format.format( - model_database, my_schema_name, "model" - ), - "resource_type": "model", - "path": "model.sql", - "original_file_path": model_sql_path, - "package_name": "test", - "raw_code": LineIndifferent(model_raw_code), - "language": "sql", - "refs": [{"name": "seed", "package": None, "version": None}], - "sources": [], - "depends_on": {"nodes": ["seed.test.seed"], "macros": []}, - "deprecation_date": None, - "unique_id": "model.test.model", - "fqn": ["test", "model"], - "metrics": [], - "tags": [], - "meta": {}, - "config": model_config, - "group": None, - "schema": my_schema_name, - "database": model_database, - "deferred": False, - "alias": "model", - "description": "The test model", - "columns": { - "id": { - "name": "id", - "description": "The user ID number", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "first_name": { - "name": "first_name", - "description": "The user's first name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "email": { - "name": "email", - "description": "The user's email", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ip_address": { - "name": "ip_address", - "description": "The user's IP address", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "updated_at": { - "name": "updated_at", - "description": "The last time this user's email was updated", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "constraints": [], - "patch_path": "test://" + model_schema_yml_path, - "docs": {"node_color": None, "show": False}, - "compiled": True, - "compiled_code": ANY, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(model_sql_path), - "unrendered_config": unrendered_model_config, - "access": "protected", - "version": None, - "latest_version": None, - }, - "model.test.second_model": { - "compiled_path": os.path.join(compiled_model_path, "second_model.sql"), - "build_path": None, - "created_at": ANY, - "name": "second_model", - "relation_name": relation_name_node_format.format( - project.database, alternate_schema, "second_model" - ), - "resource_type": "model", - "path": "second_model.sql", - "original_file_path": second_model_sql_path, - "package_name": "test", - "raw_code": LineIndifferent( - read_file_replace_returns(second_model_sql_path).rstrip("\r\n") - ), - "language": "sql", - "refs": [{"name": "seed", "package": None, "version": None}], - "sources": [], - "depends_on": {"nodes": ["seed.test.seed"], "macros": []}, - "deprecation_date": None, - "unique_id": "model.test.second_model", - "fqn": ["test", "second_model"], - "metrics": [], - "tags": [], - "meta": {}, - "config": second_config, - "group": None, - "schema": alternate_schema, - "database": project.database, - "deferred": False, - "alias": "second_model", - "description": "The second test model", - "columns": { - "id": { - "name": "id", - "description": "The user ID number", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "first_name": { - "name": "first_name", - "description": "The user's first name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "email": { - "name": "email", - "description": "The user's email", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ip_address": { - "name": "ip_address", - "description": "The user's IP address", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "updated_at": { - "name": "updated_at", - "description": "The last time this user's email was updated", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "constraints": [], - "patch_path": "test://" + model_schema_yml_path, - "docs": {"node_color": None, "show": False}, - "compiled": True, - "compiled_code": ANY, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(second_model_sql_path), - "unrendered_config": unrendered_second_config, - "access": "protected", - "version": None, - "latest_version": None, - }, - "seed.test.seed": { - "build_path": None, - "created_at": ANY, - "config": seed_config, - "group": None, - "patch_path": "test://" + seed_schema_yml_path, - "path": "seed.csv", - "name": "seed", - "root_path": project.project_root, - "resource_type": "seed", - "raw_code": "", - "package_name": "test", - "original_file_path": seed_path, - "unique_id": "seed.test.seed", - "fqn": ["test", "seed"], - "tags": [], - "meta": {}, - "depends_on": {"macros": []}, - "schema": my_schema_name, - "database": project.database, - "alias": "seed", - "deferred": False, - "description": "The test seed", - "columns": { - "id": { - "name": "id", - "description": "The user ID number", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "first_name": { - "name": "first_name", - "description": "The user's first name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "email": { - "name": "email", - "description": "The user's email", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ip_address": { - "name": "ip_address", - "description": "The user's IP address", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "updated_at": { - "name": "updated_at", - "description": "The last time this user's email was updated", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "docs": {"node_color": None, "show": True}, - "checksum": checksum_file(seed_path), - "unrendered_config": unrendered_seed_config, - "relation_name": relation_name_node_format.format( - project.database, my_schema_name, "seed" - ), - }, - "test.test.not_null_model_id.d01cc630e6": { - "alias": "not_null_model_id", - "attached_node": "model.test.model", - "compiled_path": os.path.join( - compiled_model_path, "schema.yml", "not_null_model_id.sql" - ), - "build_path": None, - "created_at": ANY, - "column_name": "id", - "columns": {}, - "config": test_config, - "sources": [], - "group": None, - "depends_on": { - "macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], - "nodes": ["model.test.model"], - }, - "deferred": False, - "description": "", - "file_key_name": "models.model", - "fqn": ["test", "not_null_model_id"], - "metrics": [], - "name": "not_null_model_id", - "original_file_path": model_schema_yml_path, - "package_name": "test", - "patch_path": None, - "path": "not_null_model_id.sql", - "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", - "language": "sql", - "refs": [{"name": "model", "package": None, "version": None}], - "relation_name": None, - "resource_type": "test", - "schema": test_audit_schema, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "test.test.not_null_model_id.d01cc630e6", - "docs": {"node_color": None, "show": True}, - "compiled": True, - "compiled_code": AnyStringWith("where id is null"), - "extra_ctes_injected": True, - "extra_ctes": [], - "test_metadata": { - "namespace": None, - "name": "not_null", - "kwargs": { - "column_name": "id", - "model": "{{ get_where_subquery(ref('model')) }}", - }, - }, - "checksum": {"name": "none", "checksum": ""}, - "unrendered_config": unrendered_test_config, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - }, - "snapshot.test.snapshot_seed": { - "alias": "snapshot_seed", - "compiled_path": None, - "build_path": None, - "created_at": ANY, - "checksum": checksum_file(snapshot_path), - "columns": {}, - "compiled": True, - "compiled_code": ANY, - "config": snapshot_config, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "database": project.database, - "group": None, - "deferred": False, - "depends_on": { - "macros": [], - "nodes": ["seed.test.seed"], - }, - "description": "", - "docs": {"node_color": None, "show": True}, - "extra_ctes": [], - "extra_ctes_injected": True, - "fqn": ["test", "snapshot_seed", "snapshot_seed"], - "metrics": [], - "meta": {}, - "name": "snapshot_seed", - "original_file_path": snapshot_path, - "package_name": "test", - "patch_path": None, - "path": "snapshot_seed.sql", - "raw_code": LineIndifferent( - read_file_replace_returns(snapshot_path) - .replace("{% snapshot snapshot_seed %}", "") - .replace("{% endsnapshot %}", "") - ), - "language": "sql", - "refs": [{"name": "seed", "package": None, "version": None}], - "relation_name": relation_name_node_format.format( - project.database, alternate_schema, "snapshot_seed" - ), - "resource_type": "snapshot", - "schema": alternate_schema, - "sources": [], - "tags": [], - "unique_id": "snapshot.test.snapshot_seed", - "unrendered_config": unrendered_snapshot_config, - }, - "test.test.test_nothing_model_.5d38568946": { - "alias": "test_nothing_model_", - "attached_node": "model.test.model", - "compiled_path": os.path.join( - compiled_model_path, "schema.yml", "test_nothing_model_.sql" - ), - "build_path": None, - "created_at": ANY, - "column_name": None, - "columns": {}, - "config": test_config, - "group": None, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": { - "macros": ["macro.test.test_nothing", "macro.dbt.get_where_subquery"], - "nodes": ["model.test.model"], - }, - "deferred": False, - "description": "", - "file_key_name": "models.model", - "fqn": ["test", "test_nothing_model_"], - "metrics": [], - "name": "test_nothing_model_", - "original_file_path": model_schema_yml_path, - "package_name": "test", - "patch_path": None, - "path": "test_nothing_model_.sql", - "raw_code": "{{ test.test_nothing(**_dbt_generic_test_kwargs) }}", - "language": "sql", - "refs": [{"name": "model", "package": None, "version": None}], - "relation_name": None, - "resource_type": "test", - "schema": test_audit_schema, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "test.test.test_nothing_model_.5d38568946", - "docs": {"node_color": None, "show": True}, - "compiled": True, - "compiled_code": AnyStringWith("select 0"), - "extra_ctes_injected": True, - "extra_ctes": [], - "test_metadata": { - "namespace": "test", - "name": "nothing", - "kwargs": { - "model": "{{ get_where_subquery(ref('model')) }}", - }, - }, - "checksum": {"name": "none", "checksum": ""}, - "unrendered_config": unrendered_test_config, - }, - "test.test.unique_model_id.67b76558ff": { - "alias": "unique_model_id", - "attached_node": "model.test.model", - "compiled_path": os.path.join( - compiled_model_path, "schema.yml", "unique_model_id.sql" - ), - "build_path": None, - "created_at": ANY, - "column_name": "id", - "columns": {}, - "config": test_config, - "group": None, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": { - "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], - "nodes": ["model.test.model"], - }, - "deferred": False, - "description": "", - "file_key_name": "models.model", - "fqn": ["test", "unique_model_id"], - "metrics": [], - "name": "unique_model_id", - "original_file_path": model_schema_yml_path, - "package_name": "test", - "patch_path": None, - "path": "unique_model_id.sql", - "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", - "language": "sql", - "refs": [{"name": "model", "package": None, "version": None}], - "relation_name": None, - "resource_type": "test", - "schema": test_audit_schema, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "test.test.unique_model_id.67b76558ff", - "docs": {"node_color": None, "show": True}, - "compiled": True, - "compiled_code": AnyStringWith("count(*)"), - "extra_ctes_injected": True, - "extra_ctes": [], - "test_metadata": { - "namespace": None, - "name": "unique", - "kwargs": { - "column_name": "id", - "model": "{{ get_where_subquery(ref('model')) }}", - }, - }, - "checksum": {"name": "none", "checksum": ""}, - "unrendered_config": unrendered_test_config, - }, - }, - "sources": { - "source.test.my_source.my_table": { - "created_at": ANY, - "columns": { - "id": { - "description": "An ID field", - "name": "id", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - } - }, - "config": { - "enabled": True, - }, - "quoting": { - "database": None, - "schema": None, - "identifier": True, - "column": None, - }, - "database": project.database, - "description": "My table", - "external": None, - "freshness": { - "error_after": {"count": None, "period": None}, - "warn_after": {"count": None, "period": None}, - "filter": None, - }, - "identifier": "seed", - "loaded_at_field": None, - "loader": "a_loader", - "meta": {}, - "name": "my_table", - "original_file_path": os.path.join("models", "schema.yml"), - "package_name": "test", - "path": os.path.join("models", "schema.yml"), - "patch_path": None, - "relation_name": relation_name_source_format.format( - project.database, my_schema_name, "seed" - ), - "resource_type": "source", - "schema": my_schema_name, - "source_description": "My source", - "source_name": "my_source", - "source_meta": {}, - "tags": [], - "unique_id": "source.test.my_source.my_table", - "fqn": ["test", "my_source", "my_table"], - "unrendered_config": {}, - }, - }, - "exposures": { - "exposure.test.notebook_exposure": { - "created_at": ANY, - "depends_on": { - "macros": [], - "nodes": ["model.test.model", "model.test.second_model"], - }, - "description": "A description of the complex exposure\n", - "label": None, - "config": { - "enabled": True, - }, - "fqn": ["test", "notebook_exposure"], - "maturity": "medium", - "meta": {"tool": "my_tool", "languages": ["python"]}, - "metrics": [], - "tags": ["my_department"], - "name": "notebook_exposure", - "original_file_path": os.path.join("models", "schema.yml"), - "owner": {"email": "something@example.com", "name": "Some name"}, - "package_name": "test", - "path": "schema.yml", - "refs": [ - {"name": "model", "package": None, "version": None}, - {"name": "second_model", "package": None, "version": None}, - ], - "resource_type": "exposure", - "sources": [], - "type": "notebook", - "unique_id": "exposure.test.notebook_exposure", - "url": "http://example.com/notebook/1", - "unrendered_config": {}, - }, - "exposure.test.simple_exposure": { - "created_at": ANY, - "depends_on": { - "macros": [], - "nodes": ["source.test.my_source.my_table", "model.test.model"], - }, - "description": "", - "label": None, - "config": { - "enabled": True, - }, - "fqn": ["test", "simple_exposure"], - "metrics": [], - "name": "simple_exposure", - "original_file_path": os.path.join("models", "schema.yml"), - "owner": { - "email": "something@example.com", - "name": None, - }, - "package_name": "test", - "path": "schema.yml", - "refs": [{"name": "model", "package": None, "version": None}], - "resource_type": "exposure", - "sources": [["my_source", "my_table"]], - "type": "dashboard", - "unique_id": "exposure.test.simple_exposure", - "url": None, - "maturity": None, - "meta": {}, - "tags": [], - "unrendered_config": {}, - }, - }, - "metrics": {}, - "groups": {}, - "selectors": {}, - "parent_map": { - "model.test.model": ["seed.test.seed"], - "model.test.second_model": ["seed.test.seed"], - "exposure.test.notebook_exposure": ["model.test.model", "model.test.second_model"], - "exposure.test.simple_exposure": [ - "model.test.model", - "source.test.my_source.my_table", - ], - "seed.test.seed": [], - "snapshot.test.snapshot_seed": ["seed.test.seed"], - "source.test.my_source.my_table": [], - "test.test.not_null_model_id.d01cc630e6": ["model.test.model"], - "test.test.test_nothing_model_.5d38568946": ["model.test.model"], - "test.test.unique_model_id.67b76558ff": ["model.test.model"], - }, - "child_map": { - "model.test.model": [ - "exposure.test.notebook_exposure", - "exposure.test.simple_exposure", - "test.test.not_null_model_id.d01cc630e6", - "test.test.test_nothing_model_.5d38568946", - "test.test.unique_model_id.67b76558ff", - ], - "model.test.second_model": ["exposure.test.notebook_exposure"], - "exposure.test.notebook_exposure": [], - "exposure.test.simple_exposure": [], - "seed.test.seed": [ - "model.test.model", - "model.test.second_model", - "snapshot.test.snapshot_seed", - ], - "snapshot.test.snapshot_seed": [], - "source.test.my_source.my_table": ["exposure.test.simple_exposure"], - "test.test.not_null_model_id.d01cc630e6": [], - "test.test.test_nothing_model_.5d38568946": [], - "test.test.unique_model_id.67b76558ff": [], - }, - "group_map": {}, - "docs": { - "doc.dbt.__overview__": ANY, - "doc.test.macro_info": ANY, - "doc.test.macro_arg_info": ANY, - }, - "disabled": {}, - "semantic_models": {}, - "unit_tests": {}, - "saved_queries": {}, - } - - -def expected_references_manifest(project): - model_database = project.database - my_schema_name = project.test_schema - docs_path = os.path.join("models", "docs.md") - ephemeral_copy_path = os.path.join("models", "ephemeral_copy.sql") - ephemeral_summary_path = os.path.join("models", "ephemeral_summary.sql") - view_summary_path = os.path.join("models", "view_summary.sql") - seed_path = os.path.join("seeds", "seed.csv") - snapshot_path = os.path.join("snapshots", "snapshot_seed.sql") - compiled_model_path = os.path.join("target", "compiled", "test", "models") - schema_yml_path = os.path.join("models", "schema.yml") - - ephemeral_copy_sql = read_file_replace_returns(ephemeral_copy_path).rstrip("\r\n") - ephemeral_summary_sql = read_file_replace_returns(ephemeral_summary_path).rstrip("\r\n") - view_summary_sql = read_file_replace_returns(view_summary_path).rstrip("\r\n") - alternate_schema = project.test_schema + "_test" - - return { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", - "dbt_version": dbt.version.__version__, - "nodes": { - "model.test.ephemeral_copy": { - "alias": "ephemeral_copy", - "compiled_path": os.path.join(compiled_model_path, "ephemeral_copy.sql"), - "build_path": None, - "created_at": ANY, - "columns": {}, - "config": get_rendered_model_config(materialized="ephemeral"), - "sources": [["my_source", "my_table"]], - "depends_on": { - "macros": [], - "nodes": ["source.test.my_source.my_table"], - }, - "deprecation_date": None, - "deferred": False, - "description": "", - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "ephemeral_copy"], - "group": None, - "metrics": [], - "name": "ephemeral_copy", - "original_file_path": ephemeral_copy_path, - "package_name": "test", - "patch_path": None, - "path": "ephemeral_copy.sql", - "raw_code": LineIndifferent(ephemeral_copy_sql), - "language": "sql", - "refs": [], - "relation_name": None, - "resource_type": "model", - "schema": my_schema_name, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "model.test.ephemeral_copy", - "compiled": True, - "compiled_code": ANY, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(ephemeral_copy_path), - "unrendered_config": get_unrendered_model_config(materialized="ephemeral"), - "access": "protected", - "version": None, - "latest_version": None, - "constraints": [], - }, - "model.test.ephemeral_summary": { - "alias": "ephemeral_summary", - "compiled_path": os.path.join(compiled_model_path, "ephemeral_summary.sql"), - "build_path": None, - "created_at": ANY, - "columns": { - "first_name": { - "description": "The first name being summarized", - "name": "first_name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ct": { - "description": "The number of instances of the first name", - "name": "ct", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "config": get_rendered_model_config(materialized="table", group="test_group"), - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": { - "macros": [], - "nodes": ["model.test.ephemeral_copy"], - }, - "deprecation_date": None, - "deferred": False, - "description": "A summmary table of the ephemeral copy of the seed data", - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "ephemeral_summary"], - "group": "test_group", - "metrics": [], - "name": "ephemeral_summary", - "original_file_path": ephemeral_summary_path, - "package_name": "test", - "patch_path": "test://" + os.path.join("models", "schema.yml"), - "path": "ephemeral_summary.sql", - "raw_code": LineIndifferent(ephemeral_summary_sql), - "language": "sql", - "refs": [{"name": "ephemeral_copy", "package": None, "version": None}], - "relation_name": '"{0}"."{1}".ephemeral_summary'.format( - model_database, my_schema_name - ), - "resource_type": "model", - "schema": my_schema_name, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "model.test.ephemeral_summary", - "compiled": True, - "compiled_code": ANY, - "extra_ctes_injected": True, - "extra_ctes": [ANY], - "checksum": checksum_file(ephemeral_summary_path), - "unrendered_config": get_unrendered_model_config( - materialized="table", group="test_group" - ), - "access": "protected", - "version": None, - "latest_version": None, - "constraints": [], - }, - "model.test.view_summary": { - "alias": "view_summary", - "compiled_path": os.path.join(compiled_model_path, "view_summary.sql"), - "build_path": None, - "created_at": ANY, - "columns": { - "first_name": { - "description": "The first name being summarized", - "name": "first_name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ct": { - "description": "The number of instances of the first name", - "name": "ct", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "config": get_rendered_model_config(), - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "database": project.database, - "depends_on": { - "macros": [], - "nodes": ["model.test.ephemeral_summary"], - }, - "deprecation_date": None, - "deferred": False, - "description": "A view of the summary of the ephemeral copy of the seed data", - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "view_summary"], - "group": None, - "metrics": [], - "name": "view_summary", - "original_file_path": view_summary_path, - "package_name": "test", - "patch_path": "test://" + schema_yml_path, - "path": "view_summary.sql", - "raw_code": LineIndifferent(view_summary_sql), - "language": "sql", - "refs": [{"name": "ephemeral_summary", "package": None, "version": None}], - "relation_name": '"{0}"."{1}".view_summary'.format(model_database, my_schema_name), - "resource_type": "model", - "schema": my_schema_name, - "sources": [], - "tags": [], - "meta": {}, - "unique_id": "model.test.view_summary", - "compiled": True, - "compiled_code": ANY, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(view_summary_path), - "unrendered_config": get_unrendered_model_config(materialized="view"), - "access": "protected", - "version": None, - "latest_version": None, - "constraints": [], - }, - "seed.test.seed": { - "alias": "seed", - "build_path": None, - "created_at": ANY, - "columns": { - "id": { - "name": "id", - "description": "The user ID number", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "first_name": { - "name": "first_name", - "description": "The user's first name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "email": { - "name": "email", - "description": "The user's email", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ip_address": { - "name": "ip_address", - "description": "The user's IP address", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "updated_at": { - "name": "updated_at", - "description": "The last time this user's email was updated", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "config": get_rendered_seed_config(), - "deferred": False, - "depends_on": {"macros": []}, - "description": "The test seed", - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "seed"], - "group": None, - "name": "seed", - "original_file_path": seed_path, - "package_name": "test", - "patch_path": "test://" + os.path.join("seeds", "schema.yml"), - "path": "seed.csv", - "raw_code": "", - "resource_type": "seed", - "root_path": project.project_root, - "schema": my_schema_name, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "seed.test.seed", - "checksum": checksum_file(seed_path), - "unrendered_config": get_unrendered_seed_config(), - "relation_name": '"{0}"."{1}".seed'.format(project.database, my_schema_name), - }, - "snapshot.test.snapshot_seed": { - "alias": "snapshot_seed", - "compiled_path": None, - "build_path": None, - "created_at": ANY, - "checksum": checksum_file(snapshot_path), - "columns": {}, - "compiled": True, - "compiled_code": ANY, - "config": get_rendered_snapshot_config(target_schema=alternate_schema), - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "database": model_database, - "deferred": False, - "depends_on": {"macros": [], "nodes": ["seed.test.seed"]}, - "description": "", - "docs": {"node_color": None, "show": True}, - "extra_ctes": [], - "extra_ctes_injected": True, - "fqn": ["test", "snapshot_seed", "snapshot_seed"], - "group": None, - "metrics": [], - "meta": {}, - "name": "snapshot_seed", - "original_file_path": snapshot_path, - "package_name": "test", - "patch_path": None, - "path": "snapshot_seed.sql", - "raw_code": ANY, - "language": "sql", - "refs": [{"name": "seed", "package": None, "version": None}], - "relation_name": '"{0}"."{1}".snapshot_seed'.format( - model_database, alternate_schema - ), - "resource_type": "snapshot", - "schema": alternate_schema, - "sources": [], - "tags": [], - "unique_id": "snapshot.test.snapshot_seed", - "unrendered_config": get_unrendered_snapshot_config( - target_schema=alternate_schema - ), - }, - }, - "sources": { - "source.test.my_source.my_table": { - "columns": { - "id": { - "description": "An ID field", - "name": "id", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - } - }, - "config": { - "enabled": True, - }, - "quoting": { - "database": False, - "schema": None, - "identifier": True, - "column": None, - }, - "created_at": ANY, - "database": project.database, - "description": "My table", - "external": None, - "freshness": { - "error_after": {"count": None, "period": None}, - "warn_after": {"count": None, "period": None}, - "filter": None, - }, - "identifier": "seed", - "loaded_at_field": None, - "loader": "a_loader", - "meta": {}, - "name": "my_table", - "original_file_path": os.path.join("models", "schema.yml"), - "package_name": "test", - "path": os.path.join("models", "schema.yml"), - "patch_path": None, - "relation_name": '{0}."{1}"."seed"'.format(project.database, my_schema_name), - "resource_type": "source", - "schema": my_schema_name, - "source_description": "My source", - "source_name": "my_source", - "source_meta": {}, - "tags": [], - "unique_id": "source.test.my_source.my_table", - "fqn": ["test", "my_source", "my_table"], - "unrendered_config": {}, - }, - }, - "exposures": { - "exposure.test.notebook_exposure": { - "created_at": ANY, - "depends_on": { - "macros": [], - "nodes": ["model.test.view_summary"], - }, - "description": "A description of the complex exposure", - "label": None, - "config": { - "enabled": True, - }, - "fqn": ["test", "notebook_exposure"], - "maturity": "medium", - "meta": {"tool": "my_tool", "languages": ["python"]}, - "metrics": [], - "tags": ["my_department"], - "name": "notebook_exposure", - "original_file_path": os.path.join("models", "schema.yml"), - "owner": {"email": "something@example.com", "name": "Some name"}, - "package_name": "test", - "path": "schema.yml", - "refs": [{"name": "view_summary", "package": None, "version": None}], - "resource_type": "exposure", - "sources": [], - "type": "notebook", - "unique_id": "exposure.test.notebook_exposure", - "url": "http://example.com/notebook/1", - "unrendered_config": {}, - }, - }, - "metrics": {}, - "groups": { - "group.test.test_group": { - "name": "test_group", - "resource_type": "group", - "original_file_path": os.path.join("models", "schema.yml"), - "owner": {"email": "test_group@test.com", "name": None}, - "package_name": "test", - "path": "schema.yml", - "unique_id": "group.test.test_group", - } - }, - "selectors": {}, - "docs": { - "doc.dbt.__overview__": ANY, - "doc.test.column_info": { - "block_contents": "An ID field", - "resource_type": "doc", - "name": "column_info", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.column_info", - }, - "doc.test.ephemeral_summary": { - "block_contents": ("A summmary table of the ephemeral copy of the seed data"), - "resource_type": "doc", - "name": "ephemeral_summary", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.ephemeral_summary", - }, - "doc.test.source_info": { - "block_contents": "My source", - "resource_type": "doc", - "name": "source_info", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.source_info", - }, - "doc.test.summary_count": { - "block_contents": "The number of instances of the first name", - "resource_type": "doc", - "name": "summary_count", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.summary_count", - }, - "doc.test.summary_first_name": { - "block_contents": "The first name being summarized", - "resource_type": "doc", - "name": "summary_first_name", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.summary_first_name", - }, - "doc.test.table_info": { - "block_contents": "My table", - "resource_type": "doc", - "name": "table_info", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.table_info", - }, - "doc.test.view_summary": { - "block_contents": ("A view of the summary of the ephemeral copy of the seed data"), - "resource_type": "doc", - "name": "view_summary", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.view_summary", - }, - "doc.test.macro_info": { - "block_contents": "My custom test that I wrote that does nothing", - "resource_type": "doc", - "name": "macro_info", - "original_file_path": os.path.join("macros", "macro.md"), - "package_name": "test", - "path": "macro.md", - "unique_id": "doc.test.macro_info", - }, - "doc.test.notebook_info": { - "block_contents": "A description of the complex exposure", - "resource_type": "doc", - "name": "notebook_info", - "original_file_path": docs_path, - "package_name": "test", - "path": "docs.md", - "unique_id": "doc.test.notebook_info", - }, - "doc.test.macro_arg_info": { - "block_contents": "The model for my custom test", - "resource_type": "doc", - "name": "macro_arg_info", - "original_file_path": os.path.join("macros", "macro.md"), - "package_name": "test", - "path": "macro.md", - "unique_id": "doc.test.macro_arg_info", - }, - }, - "child_map": { - "model.test.ephemeral_copy": ["model.test.ephemeral_summary"], - "exposure.test.notebook_exposure": [], - "model.test.ephemeral_summary": ["model.test.view_summary"], - "model.test.view_summary": ["exposure.test.notebook_exposure"], - "seed.test.seed": ["snapshot.test.snapshot_seed"], - "snapshot.test.snapshot_seed": [], - "source.test.my_source.my_table": ["model.test.ephemeral_copy"], - }, - "parent_map": { - "model.test.ephemeral_copy": ["source.test.my_source.my_table"], - "model.test.ephemeral_summary": ["model.test.ephemeral_copy"], - "model.test.view_summary": ["model.test.ephemeral_summary"], - "exposure.test.notebook_exposure": ["model.test.view_summary"], - "seed.test.seed": [], - "snapshot.test.snapshot_seed": ["seed.test.seed"], - "source.test.my_source.my_table": [], - }, - "group_map": {"test_group": ["model.test.ephemeral_summary"]}, - "disabled": {}, - "macros": { - "macro.test.test_nothing": { - "name": "test_nothing", - "depends_on": {"macros": []}, - "created_at": ANY, - "description": "My custom test that I wrote that does nothing", - "docs": {"node_color": None, "show": True}, - "macro_sql": AnyStringWith("test nothing"), - "original_file_path": os.path.join("macros", "dummy_test.sql"), - "path": os.path.join("macros", "dummy_test.sql"), - "package_name": "test", - "meta": { - "some_key": 100, - }, - "patch_path": "test://" + os.path.join("macros", "schema.yml"), - "resource_type": "macro", - "unique_id": "macro.test.test_nothing", - "supported_languages": None, - "arguments": [ - { - "name": "model", - "type": "Relation", - "description": "The model for my custom test", - }, - ], - } - }, - "semantic_models": {}, - "unit_tests": {}, - "saved_queries": {}, - } - - -def expected_versions_manifest(project): - model_database = project.database - my_schema_name = project.test_schema - - versioned_model_v1_path = os.path.join("models", "arbitrary_file_name.sql") - versioned_model_v2_path = os.path.join("models", "versioned_model_v2.sql") - ref_versioned_model_path = os.path.join("models", "ref_versioned_model.sql") - compiled_model_path = os.path.join("target", "compiled", "test", "models") - schema_yml_path = os.path.join("models", "schema.yml") - - versioned_model_v1_sql = read_file_replace_returns(versioned_model_v1_path).rstrip("\r\n") - versioned_model_v2_sql = read_file_replace_returns(versioned_model_v2_path).rstrip("\r\n") - ref_versioned_model_sql = read_file_replace_returns(ref_versioned_model_path).rstrip("\r\n") - - test_config = get_rendered_tst_config() - unrendered_test_config = get_unrendered_tst_config() - test_audit_schema = my_schema_name + "_dbt_test__audit" - model_schema_yml_path = os.path.join("models", "schema.yml") - - return { - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v7.json", - "dbt_version": dbt.version.__version__, - "nodes": { - "model.test.versioned_model.v1": { - "alias": "versioned_model_v1", - "compiled_path": os.path.join(compiled_model_path, "arbitrary_file_name.sql"), - "build_path": None, - "created_at": ANY, - "columns": { - "first_name": { - "description": "The first name being summarized", - "name": "first_name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "ct": { - "description": "The number of instances of the first name", - "name": "ct", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "config": get_rendered_model_config( - materialized="table", - group="test_group", - meta={"size": "large", "color": "blue"}, - ), - "constraints": [], - "sources": [], - "depends_on": {"macros": [], "nodes": []}, - "deferred": False, - "description": "A versioned model", - "deprecation_date": ANY, - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "versioned_model", "v1"], - "group": "test_group", - "metrics": [], - "name": "versioned_model", - "original_file_path": versioned_model_v1_path, - "package_name": "test", - "patch_path": "test://" + os.path.join("models", "schema.yml"), - "path": "arbitrary_file_name.sql", - "raw_code": LineIndifferent(versioned_model_v1_sql), - "language": "sql", - "refs": [], - "relation_name": '"{0}"."{1}".versioned_model_v1'.format( - model_database, my_schema_name - ), - "resource_type": "model", - "schema": my_schema_name, - "database": project.database, - "tags": [], - "meta": {"size": "large", "color": "blue"}, - "unique_id": "model.test.versioned_model.v1", - "compiled": True, - "compiled_code": ANY, - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(versioned_model_v1_path), - "unrendered_config": get_unrendered_model_config( - materialized="table", - group="test_group", - meta={"size": "large", "color": "blue"}, - ), - "access": "protected", - "version": 1, - "latest_version": 2, - }, - "model.test.versioned_model.v2": { - "alias": "versioned_model_v2", - "compiled_path": os.path.join(compiled_model_path, "versioned_model_v2.sql"), - "build_path": None, - "created_at": ANY, - "columns": { - "first_name": { - "description": "The first name being summarized", - "name": "first_name", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - "extra": { - "description": "", - "name": "extra", - "data_type": None, - "meta": {}, - "quote": None, - "tags": [], - "constraints": [], - }, - }, - "config": get_rendered_model_config( - materialized="view", group="test_group", meta={"size": "large", "color": "red"} - ), - "constraints": [], - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": {"macros": [], "nodes": []}, - "deferred": False, - "description": "A versioned model", - "deprecation_date": None, - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "versioned_model", "v2"], - "group": "test_group", - "metrics": [], - "name": "versioned_model", - "original_file_path": versioned_model_v2_path, - "package_name": "test", - "patch_path": "test://" + os.path.join("models", "schema.yml"), - "path": "versioned_model_v2.sql", - "raw_code": LineIndifferent(versioned_model_v2_sql), - "language": "sql", - "refs": [], - "relation_name": '"{0}"."{1}".versioned_model_v2'.format( - model_database, my_schema_name - ), - "resource_type": "model", - "schema": my_schema_name, - "database": project.database, - "tags": [], - "meta": {"size": "large", "color": "red"}, - "unique_id": "model.test.versioned_model.v2", - "compiled": True, - "compiled_code": ANY, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(versioned_model_v2_path), - "unrendered_config": get_unrendered_model_config( - materialized="view", group="test_group", meta={"size": "large", "color": "red"} - ), - "access": "protected", - "version": 2, - "latest_version": 2, - }, - "model.test.ref_versioned_model": { - "alias": "ref_versioned_model", - "compiled_path": os.path.join(compiled_model_path, "ref_versioned_model.sql"), - "build_path": None, - "created_at": ANY, - "columns": {}, - "config": get_rendered_model_config(), - "constraints": [], - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "database": project.database, - "depends_on": { - "macros": [], - "nodes": [ - "model.test.versioned_model.v2", - "model.test.versioned_model.v1", - ], - }, - "deprecation_date": None, - "deferred": False, - "description": "", - "docs": {"node_color": None, "show": True}, - "fqn": ["test", "ref_versioned_model"], - "group": None, - "metrics": [], - "name": "ref_versioned_model", - "original_file_path": ref_versioned_model_path, - "package_name": "test", - "patch_path": "test://" + schema_yml_path, - "path": "ref_versioned_model.sql", - "raw_code": LineIndifferent(ref_versioned_model_sql), - "language": "sql", - "refs": [ - {"name": "versioned_model", "package": None, "version": 2}, - {"name": "versioned_model", "package": None, "version": "2"}, - {"name": "versioned_model", "package": None, "version": 2}, - {"name": "versioned_model", "package": None, "version": None}, - {"name": "versioned_model", "package": None, "version": 1}, - ], - "relation_name": '"{0}"."{1}".ref_versioned_model'.format( - model_database, my_schema_name - ), - "resource_type": "model", - "schema": my_schema_name, - "sources": [], - "tags": [], - "meta": {}, - "unique_id": "model.test.ref_versioned_model", - "compiled": True, - "compiled_code": ANY, - "extra_ctes_injected": True, - "extra_ctes": [], - "checksum": checksum_file(ref_versioned_model_path), - "unrendered_config": get_unrendered_model_config(), - "access": "protected", - "version": None, - "latest_version": None, - }, - "test.test.unique_versioned_model_v1_first_name.6138195dec": { - "alias": "unique_versioned_model_v1_first_name", - "attached_node": "model.test.versioned_model.v1", - "compiled_path": os.path.join( - compiled_model_path, "schema.yml", "unique_versioned_model_v1_first_name.sql" - ), - "build_path": None, - "created_at": ANY, - "column_name": "first_name", - "columns": {}, - "config": test_config, - "group": "test_group", - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": { - "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], - "nodes": ["model.test.versioned_model.v1"], - }, - "deferred": False, - "description": "", - "file_key_name": "models.versioned_model", - "fqn": ["test", "unique_versioned_model_v1_first_name"], - "metrics": [], - "name": "unique_versioned_model_v1_first_name", - "original_file_path": model_schema_yml_path, - "package_name": "test", - "patch_path": None, - "path": "unique_versioned_model_v1_first_name.sql", - "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", - "language": "sql", - "refs": [{"name": "versioned_model", "package": None, "version": 1}], - "relation_name": None, - "resource_type": "test", - "schema": test_audit_schema, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "test.test.unique_versioned_model_v1_first_name.6138195dec", - "docs": {"node_color": None, "show": True}, - "compiled": True, - "compiled_code": AnyStringWith("count(*)"), - "extra_ctes_injected": True, - "extra_ctes": [], - "test_metadata": { - "namespace": None, - "name": "unique", - "kwargs": { - "column_name": "first_name", - "model": "{{ get_where_subquery(ref('versioned_model', version='1')) }}", - }, - }, - "checksum": {"name": "none", "checksum": ""}, - "unrendered_config": unrendered_test_config, - }, - "test.test.unique_versioned_model_v1_count.0b4c0b688a": { - "alias": "unique_versioned_model_v1_count", - "attached_node": "model.test.versioned_model.v1", - "compiled_path": os.path.join( - compiled_model_path, "schema.yml", "unique_versioned_model_v1_count.sql" - ), - "build_path": None, - "created_at": ANY, - "column_name": None, - "columns": {}, - "config": test_config, - "group": "test_group", - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": { - "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], - "nodes": ["model.test.versioned_model.v1"], - }, - "deferred": False, - "description": "", - "file_key_name": "models.versioned_model", - "fqn": ["test", "unique_versioned_model_v1_count"], - "metrics": [], - "name": "unique_versioned_model_v1_count", - "original_file_path": model_schema_yml_path, - "package_name": "test", - "patch_path": None, - "path": "unique_versioned_model_v1_count.sql", - "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", - "language": "sql", - "refs": [{"name": "versioned_model", "package": None, "version": 1}], - "relation_name": None, - "resource_type": "test", - "schema": test_audit_schema, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "test.test.unique_versioned_model_v1_count.0b4c0b688a", - "docs": {"node_color": None, "show": True}, - "compiled": True, - "compiled_code": AnyStringWith("count(*)"), - "extra_ctes_injected": True, - "extra_ctes": [], - "test_metadata": { - "namespace": None, - "name": "unique", - "kwargs": { - "column_name": "count", - "model": "{{ get_where_subquery(ref('versioned_model', version='1')) }}", - }, - }, - "checksum": {"name": "none", "checksum": ""}, - "unrendered_config": unrendered_test_config, - }, - "test.test.unique_versioned_model_v2_first_name.998430d28e": { - "alias": "unique_versioned_model_v2_first_name", - "attached_node": "model.test.versioned_model.v2", - "compiled_path": os.path.join( - compiled_model_path, "schema.yml", "unique_versioned_model_v2_first_name.sql" - ), - "build_path": None, - "created_at": ANY, - "column_name": "first_name", - "columns": {}, - "config": test_config, - "group": "test_group", - "contract": {"checksum": None, "enforced": False, "alias_types": True}, - "sources": [], - "depends_on": { - "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], - "nodes": ["model.test.versioned_model.v2"], - }, - "deferred": False, - "description": "", - "file_key_name": "models.versioned_model", - "fqn": ["test", "unique_versioned_model_v2_first_name"], - "metrics": [], - "name": "unique_versioned_model_v2_first_name", - "original_file_path": model_schema_yml_path, - "package_name": "test", - "patch_path": None, - "path": "unique_versioned_model_v2_first_name.sql", - "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", - "language": "sql", - "refs": [{"name": "versioned_model", "package": None, "version": 2}], - "relation_name": None, - "resource_type": "test", - "schema": test_audit_schema, - "database": project.database, - "tags": [], - "meta": {}, - "unique_id": "test.test.unique_versioned_model_v2_first_name.998430d28e", - "docs": {"node_color": None, "show": True}, - "compiled": True, - "compiled_code": AnyStringWith("count(*)"), - "extra_ctes_injected": True, - "extra_ctes": [], - "test_metadata": { - "namespace": None, - "name": "unique", - "kwargs": { - "column_name": "first_name", - "model": "{{ get_where_subquery(ref('versioned_model', version='2')) }}", - }, - }, - "checksum": {"name": "none", "checksum": ""}, - "unrendered_config": unrendered_test_config, - }, - }, - "exposures": { - "exposure.test.notebook_exposure": { - "created_at": ANY, - "depends_on": { - "macros": [], - "nodes": ["model.test.versioned_model.v2"], - }, - "description": "notebook_info", - "label": None, - "config": { - "enabled": True, - }, - "fqn": ["test", "notebook_exposure"], - "maturity": None, - "meta": {}, - "metrics": [], - "tags": [], - "name": "notebook_exposure", - "original_file_path": os.path.join("models", "schema.yml"), - "owner": {"email": "something@example.com", "name": "Some name"}, - "package_name": "test", - "path": "schema.yml", - "refs": [{"name": "versioned_model", "package": None, "version": 2}], - "resource_type": "exposure", - "sources": [], - "type": "notebook", - "unique_id": "exposure.test.notebook_exposure", - "url": None, - "unrendered_config": {}, - }, - }, - "metrics": {}, - "groups": { - "group.test.test_group": { - "name": "test_group", - "resource_type": "group", - "original_file_path": os.path.join("models", "schema.yml"), - "owner": {"email": "test_group@test.com", "name": None}, - "package_name": "test", - "path": "schema.yml", - "unique_id": "group.test.test_group", - } - }, - "sources": {}, - "selectors": {}, - "docs": {}, - "child_map": { - "model.test.versioned_model.v1": [ - "model.test.ref_versioned_model", - "test.test.unique_versioned_model_v1_count.0b4c0b688a", - "test.test.unique_versioned_model_v1_first_name.6138195dec", - ], - "model.test.versioned_model.v2": [ - "exposure.test.notebook_exposure", - "model.test.ref_versioned_model", - "test.test.unique_versioned_model_v2_first_name.998430d28e", - ], - "model.test.ref_versioned_model": [], - "exposure.test.notebook_exposure": [], - "test.test.unique_versioned_model_v1_first_name.6138195dec": [], - "test.test.unique_versioned_model_v1_count.0b4c0b688a": [], - "test.test.unique_versioned_model_v2_first_name.998430d28e": [], - }, - "parent_map": { - "model.test.versioned_model.v1": [], - "model.test.versioned_model.v2": [], - "model.test.ref_versioned_model": [ - "model.test.versioned_model.v1", - "model.test.versioned_model.v2", - ], - "exposure.test.notebook_exposure": ["model.test.versioned_model.v2"], - "test.test.unique_versioned_model_v1_first_name.6138195dec": [ - "model.test.versioned_model.v1" - ], - "test.test.unique_versioned_model_v1_count.0b4c0b688a": [ - "model.test.versioned_model.v1" - ], - "test.test.unique_versioned_model_v2_first_name.998430d28e": [ - "model.test.versioned_model.v2" - ], - }, - "group_map": { - "test_group": [ - "model.test.versioned_model.v1", - "model.test.versioned_model.v2", - "test.test.unique_versioned_model_v1_first_name.6138195dec", - "test.test.unique_versioned_model_v1_count.0b4c0b688a", - "test.test.unique_versioned_model_v2_first_name.998430d28e", - ] - }, - "disabled": {}, - "macros": {}, - "semantic_models": {}, - "unit_tests": {}, - "saved_queries": {}, - } diff --git a/tests/functional/artifacts/expected_run_results.py b/tests/functional/artifacts/expected_run_results.py deleted file mode 100644 index 3a3148eba..000000000 --- a/tests/functional/artifacts/expected_run_results.py +++ /dev/null @@ -1,243 +0,0 @@ -from unittest.mock import ANY - -from dbt.tests.util import AnyFloat - - -def expected_run_results(): - """ - The expected results of this run. - """ - - return [ - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.model", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.second_model", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "seed.test.seed", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": None, - "compiled_code": ANY, - "relation_name": None, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "snapshot.test.snapshot_seed", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "test.test.not_null_model_id.d01cc630e6", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": None, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "test.test.test_nothing_model_.5d38568946", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": None, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "test.test.unique_model_id.67b76558ff", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": None, - }, - ] - - -def expected_references_run_results(): - return [ - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.ephemeral_summary", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.view_summary", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "seed.test.seed", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": None, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "snapshot.test.snapshot_seed", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - ] - - -def expected_versions_run_results(): - return [ - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.ref_versioned_model", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.versioned_model.v1", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "model.test.versioned_model.v2", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "test.test.unique_versioned_model_v1_count.0b4c0b688a", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "test.test.unique_versioned_model_v1_first_name.6138195dec", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - { - "status": "success", - "message": None, - "execution_time": AnyFloat(), - "unique_id": "test.test.unique_versioned_model_v2_first_name.998430d28e", - "adapter_response": ANY, - "thread_id": ANY, - "timing": [ANY, ANY], - "failures": ANY, - "compiled": True, - "compiled_code": ANY, - "relation_name": ANY, - }, - ] diff --git a/tests/functional/artifacts/test_artifact_fields.py b/tests/functional/artifacts/test_artifact_fields.py deleted file mode 100644 index cbc679358..000000000 --- a/tests/functional/artifacts/test_artifact_fields.py +++ /dev/null @@ -1,52 +0,0 @@ -from dbt.tests.util import get_artifact, get_manifest -import pytest - -from tests.functional.utils import run_dbt - - -# This is a place to put specific tests for contents of artifacts that we -# don't want to bother putting in the big artifact output test, which is -# hard to update. - - -my_model_sql = "select 1 as fun" - -schema_yml = """ -version: 2 -models: - - name: my_model - columns: - - name: fun - data_tests: - - not_null -""" - - -class TestRelationNameInTests: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "schema.yml": schema_yml, - } - - def test_relation_name_in_tests(self, project): - results = run_dbt(["run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - test_id = "test.test.not_null_my_model_fun.bf3b032a01" - assert test_id in manifest.nodes - assert manifest.nodes[test_id].relation_name is None - - results = run_dbt(["test", "--store-failures"]) - assert len(results) == 1 - # The relation_name for tests with previously generated manifest and - # store_failures passed in on the command line, will be in the manifest.json - # but not in the parsed manifest. - manifest = get_manifest(project.project_root) - assert manifest.nodes[test_id].relation_name is None - manifest_json = get_artifact(project.project_root, "target", "manifest.json") - assert test_id in manifest_json["nodes"] - relation_name = manifest_json["nodes"][test_id]["relation_name"] - assert relation_name - assert '"not_null_my_model_fun"' in relation_name diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py deleted file mode 100644 index ca2c03f04..000000000 --- a/tests/functional/artifacts/test_artifacts.py +++ /dev/null @@ -1,621 +0,0 @@ -import jsonschema -import os - -import dbt -from dbt.artifacts.schemas.results import RunStatus -from dbt.artifacts.schemas.run import RunResultsArtifact -from dbt.contracts.graph.manifest import WritableManifest -from dbt.tests.util import ( - check_datetime_between, - get_artifact, -) -import pytest - -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -models__schema_yml = """ -version: 2 - -models: - - name: model - description: "The test model" - docs: - show: false - columns: - - name: id - description: The user ID number - data_tests: - - unique - - not_null - - name: first_name - description: The user's first name - - name: email - description: The user's email - - name: ip_address - description: The user's IP address - - name: updated_at - description: The last time this user's email was updated - data_tests: - - test.nothing - - - name: second_model - description: "The second test model" - docs: - show: false - columns: - - name: id - description: The user ID number - - name: first_name - description: The user's first name - - name: email - description: The user's email - - name: ip_address - description: The user's IP address - - name: updated_at - description: The last time this user's email was updated - - -sources: - - name: my_source - description: "My source" - loader: a_loader - schema: "{{ var('test_schema') }}" - tables: - - name: my_table - description: "My table" - identifier: seed - quoting: - identifier: True - columns: - - name: id - description: "An ID field" - - -exposures: - - name: simple_exposure - type: dashboard - depends_on: - - ref('model') - - source('my_source', 'my_table') - owner: - email: something@example.com - - name: notebook_exposure - type: notebook - depends_on: - - ref('model') - - ref('second_model') - owner: - email: something@example.com - name: Some name - description: > - A description of the complex exposure - maturity: medium - meta: - tool: 'my_tool' - languages: - - python - tags: ['my_department'] - url: http://example.com/notebook/1 -""" - -models__second_model_sql = """ -{{ - config( - materialized='view', - schema='test', - ) -}} - -select * from {{ ref('seed') }} -""" - -models__readme_md = """ -This is a readme.md file with {{ invalid-ish jinja }} in it -""" - -models__model_sql = """ -{{ - config( - materialized='view', - ) -}} - -select * from {{ ref('seed') }} -""" - -models__model_with_pre_hook_sql = """ -{{ - config( - pre_hook={ - "sql": "{{ alter_timezone(timezone='Etc/UTC') }}" - } - ) -}} -select current_setting('timezone') as timezone -""" - -seed__schema_yml = """ -version: 2 -seeds: - - name: seed - description: "The test seed" - columns: - - name: id - description: The user ID number - - name: first_name - description: The user's first name - - name: email - description: The user's email - - name: ip_address - description: The user's IP address - - name: updated_at - description: The last time this user's email was updated -""" - -seed__seed_csv = """id,first_name,email,ip_address,updated_at -1,Larry,lking0@miitbeian.gov.cn,69.135.206.194,2008-09-12 19:08:31 -""" - -macros__schema_yml = """ -version: 2 -macros: - - name: test_nothing - description: "{{ doc('macro_info') }}" - meta: - some_key: 100 - arguments: - - name: model - type: Relation - description: "{{ doc('macro_arg_info') }}" -""" - -macros__macro_md = """ -{% docs macro_info %} -My custom test that I wrote that does nothing -{% enddocs %} - -{% docs macro_arg_info %} -The model for my custom test -{% enddocs %} -""" - -macros__dummy_test_sql = """ -{% test nothing(model) %} - --- a silly test to make sure that table-level tests show up in the manifest --- without a column_name field -select 0 - -{% endtest %} -""" - -macros__alter_timezone_sql = """ -{% macro alter_timezone(timezone='America/Los_Angeles') %} -{% set sql %} - SET TimeZone='{{ timezone }}'; -{% endset %} - -{% do run_query(sql) %} -{% do log("Timezone set to: " + timezone, info=True) %} -{% endmacro %} -""" - -snapshot__snapshot_seed_sql = """ -{% snapshot snapshot_seed %} -{{ - config( - unique_key='id', - strategy='check', - check_cols='all', - target_schema=var('alternate_schema') - ) -}} -select * from {{ ref('seed') }} -{% endsnapshot %} -""" - -ref_models__schema_yml = """ -version: 2 - -groups: - - name: test_group - owner: - email: test_group@test.com - -models: - - name: ephemeral_summary - description: "{{ doc('ephemeral_summary') }}" - config: - group: test_group - columns: &summary_columns - - name: first_name - description: "{{ doc('summary_first_name') }}" - - name: ct - description: "{{ doc('summary_count') }}" - - name: view_summary - description: "{{ doc('view_summary') }}" - columns: *summary_columns - -sources: - - name: my_source - description: "{{ doc('source_info') }}" - loader: a_loader - schema: "{{ var('test_schema') }}" - quoting: - database: False - identifier: False - tables: - - name: my_table - description: "{{ doc('table_info') }}" - identifier: seed - quoting: - identifier: True - columns: - - name: id - description: "{{ doc('column_info') }}" - -exposures: - - name: notebook_exposure - type: notebook - depends_on: - - ref('view_summary') - owner: - email: something@example.com - name: Some name - description: "{{ doc('notebook_info') }}" - maturity: medium - url: http://example.com/notebook/1 - meta: - tool: 'my_tool' - languages: - - python - tags: ['my_department'] - -""" - -ref_models__view_summary_sql = """ -{{ - config( - materialized = "view" - ) -}} - -select first_name, ct from {{ref('ephemeral_summary')}} -order by ct asc - -""" - -ref_models__ephemeral_summary_sql = """ -{{ - config( - materialized = "table" - ) -}} - -select first_name, count(*) as ct from {{ref('ephemeral_copy')}} -group by first_name -order by first_name asc - -""" - -ref_models__ephemeral_copy_sql = """ -{{ - config( - materialized = "ephemeral" - ) -}} - -select * from {{ source("my_source", "my_table") }} - -""" - -ref_models__docs_md = """ -{% docs ephemeral_summary %} -A summmary table of the ephemeral copy of the seed data -{% enddocs %} - -{% docs summary_first_name %} -The first name being summarized -{% enddocs %} - -{% docs summary_count %} -The number of instances of the first name -{% enddocs %} - -{% docs view_summary %} -A view of the summary of the ephemeral copy of the seed data -{% enddocs %} - -{% docs source_info %} -My source -{% enddocs %} - -{% docs table_info %} -My table -{% enddocs %} - -{% docs column_info %} -An ID field -{% enddocs %} - -{% docs notebook_info %} -A description of the complex exposure -{% enddocs %} - -""" - -versioned_models__schema_yml = """ -version: 2 - -groups: - - name: test_group - owner: - email: test_group@test.com - -models: - - name: versioned_model - description: "A versioned model" - latest_version: 2 - config: - group: test_group - materialized: table - meta: - color: blue - size: large - data_tests: - - unique: - column_name: count - columns: - - name: first_name - description: "The first name being summarized" - data_tests: - - unique - - name: ct - description: "The number of instances of the first name" - versions: - - v: 1 - defined_in: arbitrary_file_name - deprecation_date: 2022-07-11 - - v: 2 - config: - materialized: view - meta: - color: red - data_tests: [] - columns: - - include: '*' - exclude: ['ct'] - - name: extra - - name: ref_versioned_model - -exposures: - - name: notebook_exposure - type: notebook - depends_on: - - ref('versioned_model', v=2) - owner: - email: something@example.com - name: Some name - description: "notebook_info" -""" - -versioned_models__v1_sql = """ -select "test first name" as first_name, 1 as ct -""" - -versioned_models__v2_sql = """ -select "test first name" as first_name, 1 as extra -""" - -versioned_models___ref_sql = """ -select first_name from {{ ref("versioned_model", version=2) }} -UNION ALL -select first_name from {{ ref("versioned_model", version="2") }} -UNION ALL -select first_name from {{ ref("versioned_model", v=2) }} -UNION ALL -select first_name from {{ ref("versioned_model") }} -UNION ALL -select first_name from {{ ref("versioned_model", version=1) }} -""" - - -def verify_metadata(metadata, dbt_schema_version, start_time): - assert "generated_at" in metadata - check_datetime_between(metadata["generated_at"], start=start_time) - assert "dbt_version" in metadata - assert metadata["dbt_version"] == dbt.version.__version__ - assert "dbt_schema_version" in metadata - assert metadata["dbt_schema_version"] == dbt_schema_version - key = "env_key" - if os.name == "nt": - key = key.upper() - assert metadata["env"] == {key: "env_value"} - - -def verify_manifest(project, expected_manifest, start_time, manifest_schema_path): - manifest_path = os.path.join(project.project_root, "target", "manifest.json") - assert os.path.exists(manifest_path) - manifest = get_artifact(manifest_path) - # Verify that manifest jsonschema from WritableManifest works - manifest_schema = WritableManifest.json_schema() - validate(manifest_schema, manifest) - - # Verify that stored manifest jsonschema works. - # If this fails, schemas need to be updated with: - # scripts/collect-artifact-schema.py --path schemas --artifact manifest - stored_manifest_schema = get_artifact(manifest_schema_path) - validate(stored_manifest_schema, manifest) - - manifest_keys = { - "nodes", - "sources", - "macros", - "parent_map", - "child_map", - "group_map", - "metrics", - "groups", - "docs", - "metadata", - "docs", - "disabled", - "exposures", - "selectors", - "semantic_models", - "unit_tests", - "saved_queries", - } - - assert set(manifest.keys()) == manifest_keys - - for key in manifest_keys: - if key == "macros": - verify_manifest_macros(manifest, expected_manifest.get("macros")) - elif key == "metadata": - metadata = manifest["metadata"] - dbt_schema_version = str(WritableManifest.dbt_schema_version) - verify_metadata(metadata, dbt_schema_version, start_time) - assert ( - "project_id" in metadata - and metadata["project_id"] == "098f6bcd4621d373cade4e832627b4f6" - ) - assert "project_name" in metadata and metadata["project_name"] == "test" - assert ( - "send_anonymous_usage_stats" in metadata - and metadata["send_anonymous_usage_stats"] is False - ) - assert "adapter_type" in metadata and metadata["adapter_type"] == project.adapter_type - elif key in ["nodes", "sources", "exposures", "metrics", "disabled", "docs"]: - for unique_id, node in expected_manifest[key].items(): - assert unique_id in manifest[key] - assert manifest[key][unique_id] == node, f"{unique_id} did not match" - else: # ['docs', 'parent_map', 'child_map', 'group_map', 'selectors', 'semantic_models', 'saved_queries'] - assert manifest[key] == expected_manifest[key] - - -def verify_manifest_macros(manifest, expected=None): - assert "macros" in manifest - if expected: - for unique_id, expected_macro in expected.items(): - assert unique_id in manifest["macros"] - actual_macro = manifest["macros"][unique_id] - assert expected_macro == actual_macro - - -def verify_run_results(project, expected_run_results, start_time, run_results_schema_path): - run_results_path = os.path.join(project.project_root, "target", "run_results.json") - run_results = get_artifact(run_results_path) - assert "metadata" in run_results - - # Verify that jsonschema for RunResultsArtifact works - run_results_schema = RunResultsArtifact.json_schema() - validate(run_results_schema, run_results) - - # Verify that stored run_results jsonschema works. - # If this fails, schemas need to be updated with: - # scripts/collect-artifact-schema.py --path schemas --artifact run-results - stored_run_results_schema = get_artifact(run_results_schema_path) - validate(stored_run_results_schema, run_results) - - dbt_schema_version = str(RunResultsArtifact.dbt_schema_version) - verify_metadata(run_results["metadata"], dbt_schema_version, start_time) - assert "elapsed_time" in run_results - assert run_results["elapsed_time"] > 0 - assert isinstance(run_results["elapsed_time"], float) - assert "args" in run_results - # sort the results so we can make reasonable assertions - run_results["results"].sort(key=lambda r: r["unique_id"]) - assert run_results["results"] == expected_run_results - assert set(run_results) == {"elapsed_time", "results", "metadata", "args"} - - -class BaseVerifyProject: - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - alternate_schema_name = project.test_schema + "_test" - project.create_test_schema(schema_name=alternate_schema_name) - os.environ["DBT_ENV_CUSTOM_ENV_env_key"] = "env_value" - run_dbt(["seed"]) - yield - del os.environ["DBT_ENV_CUSTOM_ENV_env_key"] - - @pytest.fixture(scope="class") - def seeds(self): - return {"schema.yml": seed__schema_yml, "seed.csv": seed__seed_csv} - - @pytest.fixture(scope="class") - def macros(self): - return { - "schema.yml": macros__schema_yml, - "macro.md": macros__macro_md, - "dummy_test.sql": macros__dummy_test_sql, - } - - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot_seed.sql": snapshot__snapshot_seed_sql} - - @pytest.fixture(scope="class") - def project_config_update(self, unique_schema): - alternate_schema = unique_schema + "_test" - return { - "vars": { - "test_schema": unique_schema, - "alternate_schema": alternate_schema, - }, - "seeds": { - "quote_columns": True, - }, - "quoting": {"identifier": False}, - } - - @pytest.fixture(scope="class") - def manifest_schema_path(self, request): - schema_version_paths = WritableManifest.dbt_schema_version.path.split("/") - manifest_schema_path = os.path.join( - request.config.rootdir, "schemas", *schema_version_paths - ) - return manifest_schema_path - - @pytest.fixture(scope="class") - def run_results_schema_path(self, request): - schema_version_paths = RunResultsArtifact.dbt_schema_version.path.split("/") - run_results_schema_path = os.path.join( - request.config.rootdir, "schemas", *schema_version_paths - ) - return run_results_schema_path - - -def validate(artifact_schema, artifact_dict): - validator = jsonschema.Draft7Validator(artifact_schema) - error = next(iter(validator.iter_errors(artifact_dict)), None) - assert error is None - - -class TestVerifyRunOperation(BaseVerifyProject): - @pytest.fixture(scope="class") - def macros(self): - return {"alter_timezone.sql": macros__alter_timezone_sql} - - @pytest.fixture(scope="class") - def models(self): - return { - "model_with_pre_hook.sql": models__model_with_pre_hook_sql, - } - - def test_run_operation(self, project): - results, log_output = run_dbt_and_capture(["run-operation", "alter_timezone"]) - assert len(results) == 1 - assert results[0].status == RunStatus.Success - assert results[0].unique_id == "macro.test.alter_timezone" - assert "Timezone set to: America/Los_Angeles" in log_output - - def test_run_model_with_operation(self, project): - # pre-hooks are not included in run_results since they are an attribute of the node and not a node in their - # own right - results, log_output = run_dbt_and_capture(["run", "--select", "model_with_pre_hook"]) - assert len(results) == 1 - assert results[0].status == RunStatus.Success - assert "Timezone set to: Etc/UTC" in log_output diff --git a/tests/functional/artifacts/test_docs_generate_defer.py b/tests/functional/artifacts/test_docs_generate_defer.py deleted file mode 100644 index 1357f3847..000000000 --- a/tests/functional/artifacts/test_docs_generate_defer.py +++ /dev/null @@ -1,43 +0,0 @@ -import os -import shutil - -import pytest - -from tests.functional.utils import run_dbt - - -model_sql = """ -select 1 as id -""" - - -class TestDocsGenerateDefer: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": model_sql} - - def copy_state(self): - assert not os.path.exists("state") - os.makedirs("state") - shutil.copyfile("target/manifest.json", "state/manifest.json") - - def test_generate_defer( - self, - project, - ): - results = run_dbt(["run"]) - assert len(results) == 1 - - # copy state files - self.copy_state() - - # defer test, it succeeds - catalog = run_dbt(["docs", "generate", "--state", "./state", "--defer"]) - assert catalog.nodes["model.test.model"] - - # Check that catalog validates with jsonschema - catalog_dict = catalog.to_dict() - try: - catalog.validate(catalog_dict) - except Exception: - raise pytest.fail("Catalog validation failed") diff --git a/tests/functional/artifacts/test_override.py b/tests/functional/artifacts/test_override.py deleted file mode 100644 index 4352aa1c6..000000000 --- a/tests/functional/artifacts/test_override.py +++ /dev/null @@ -1,38 +0,0 @@ -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.utils import run_dbt - - -model_sql = """ -select 1 as id -""" - - -fail_macros__failure_sql = """ -{% macro get_catalog_relations(information_schema, relations) %} - {% do exceptions.raise_compiler_error('rejected: no catalogs for you') %} -{% endmacro %} - -""" - - -class TestDocsGenerateOverride: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": model_sql} - - @pytest.fixture(scope="class") - def macros(self): - return {"failure.sql": fail_macros__failure_sql} - - def test_override_used( - self, - project, - ): - results = run_dbt(["run"]) - assert len(results) == 1 - # this should pick up our failure macro and raise a compilation exception - with pytest.raises(CompilationError) as excinfo: - run_dbt(["--warn-error", "docs", "generate"]) - assert "rejected: no catalogs for you" in str(excinfo.value) diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py deleted file mode 100644 index 5ed37b9a2..000000000 --- a/tests/functional/artifacts/test_previous_version_state.py +++ /dev/null @@ -1,454 +0,0 @@ -import json -import os -import shutil - -from dbt.artifacts.schemas.base import get_artifact_schema_version -from dbt.artifacts.schemas.run import RunResultsArtifact -from dbt.contracts.graph.manifest import WritableManifest -from dbt.artifacts.exceptions import IncompatibleSchemaError -from dbt.tests.util import get_manifest -import pytest - -from tests.functional.utils import run_dbt - - -# This project must have one of each kind of node type, plus disabled versions, for -# test coverage to be complete. -models__my_model_sql = """ -select 1 as id -""" - -models__disabled_model_sql = """ -{{ config(enabled=False) }} -select 2 as id -""" - -seeds__my_seed_csv = """ -id,value -4,2 -""" - -seeds__disabled_seed_csv = """ -id,value -6,4 -""" - -docs__somedoc_md = """ -{% docs somedoc %} -Testing, testing -{% enddocs %} -""" - -macros__do_nothing_sql = """ -{% macro do_nothing(foo2, bar2) %} - select - '{{ foo2 }}' as foo2, - '{{ bar2 }}' as bar2 -{% endmacro %} -""" - -macros__dummy_test_sql = """ -{% test check_nothing(model) %} --- a silly test to make sure that table-level tests show up in the manifest --- without a column_name field - -select 0 - -{% endtest %} -""" - -macros__disabled_dummy_test_sql = """ -{% test disabled_check_nothing(model) %} --- a silly test to make sure that table-level tests show up in the manifest --- without a column_name field - -{{ config(enabled=False) }} -select 0 - -{% endtest %} -""" - -snapshot__snapshot_seed_sql = """ -{% snapshot snapshot_seed %} -{{ - config( - unique_key='id', - strategy='check', - check_cols='all', - target_schema=schema, - ) -}} -select * from {{ ref('my_seed') }} -{% endsnapshot %} -""" - -snapshot__disabled_snapshot_seed_sql = """ -{% snapshot disabled_snapshot_seed %} -{{ - config( - unique_key='id', - strategy='check', - check_cols='all', - target_schema=schema, - enabled=False, - ) -}} -select * from {{ ref('my_seed') }} -{% endsnapshot %} -""" - -tests__just_my_sql = """ -{{ config(tags = ['data_test_tag']) }} - -select * from {{ ref('my_model') }} -where false -""" - -tests__disabled_just_my_sql = """ -{{ config(enabled=False) }} - -select * from {{ ref('my_model') }} -where false -""" - -analyses__a_sql = """ -select 4 as id -""" - -analyses__disabled_a_sql = """ -{{ config(enabled=False) }} -select 9 as id -""" - -metricflow_time_spine_sql = """ -SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day -""" - -# Use old attribute names (v1.0-1.2) to test forward/backward compatibility with the rename in v1.3 -models__schema_yml = """ -version: 2 -models: - - name: my_model - description: "Example model" - data_tests: - - check_nothing - - disabled_check_nothing - columns: - - name: id - data_tests: - - not_null - -semantic_models: - - name: semantic_people - model: ref('my_model') - dimensions: - - name: favorite_color - type: categorical - - name: created_at - type: TIME - type_params: - time_granularity: day - measures: - - name: years_tenure - agg: SUM - expr: tenure - - name: people - agg: count - expr: id - - name: customers - agg: count - expr: id - entities: - - name: id - type: primary - defaults: - agg_time_dimension: created_at - -metrics: - - name: blue_customers_post_2010 - label: Blue Customers since 2010 - type: simple - filter: "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'" - type_params: - measure: - name: customers - filter: "{{ Dimension('id__favorite_color') }} = 'blue'" - - name: customers - label: Customers Metric - type: simple - type_params: - measure: customers - - name: disabled_metric - label: Count records - config: - enabled: False - filter: "{{ Dimension('id__favorite_color') }} = 'blue'" - type: simple - type_params: - measure: customers - - name: ratio_of_blue_customers_to_red_customers - label: Very Important Customer Color Ratio - type: ratio - type_params: - numerator: - name: customers - filter: "{{ Dimension('id__favorite_color')}} = 'blue'" - denominator: - name: customers - filter: "{{ Dimension('id__favorite_color')}} = 'red'" - - name: doubled_blue_customers - type: derived - label: Inflated blue customer numbers - type_params: - expr: 'customers * 2' - metrics: - - name: customers - filter: "{{ Dimension('id__favorite_color')}} = 'blue'" - - -sources: - - name: my_source - description: "My source" - loader: a_loader - tables: - - name: my_table - description: "My table" - identifier: my_seed - - name: disabled_table - description: "Disabled table" - config: - enabled: False - -exposures: - - name: simple_exposure - type: dashboard - depends_on: - - ref('my_model') - - source('my_source', 'my_table') - owner: - email: something@example.com - - name: disabled_exposure - type: dashboard - config: - enabled: False - depends_on: - - ref('my_model') - owner: - email: something@example.com - -seeds: - - name: disabled_seed - config: - enabled: False -""" - -# SETUP: Using this project, we have run past minor versions of dbt -# to generate each contracted version of `manifest.json`. - -# Whenever we bump the manifest version, we should add a new entry for that version -# into `data`, generated from this same project, and update the CURRENT_EXPECTED_MANIFEST_VERSION. -# You can generate the manifest using the generate_latest_manifest() method below. - -# TEST: Then, using the *current* version of dbt (this branch), -# we will perform a `--state` comparison against those older manifests. - -# Some comparisons should succeed, where we expect backward/forward compatibility. - -# Comparisons against older versions should fail, because the structure of the -# WritableManifest class has changed in ways that prevent successful deserialization -# of older JSON manifests. - - -# We are creating enabled versions of every node type that might be in the manifest, -# plus disabled versions for types that support it (everything except macros and docs). - - -class TestPreviousVersionState: - CURRENT_EXPECTED_MANIFEST_VERSION = 12 - CURRENT_EXPECTED_RUN_RESULTS_VERSION = 6 - - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": models__my_model_sql, - "schema.yml": models__schema_yml, - "somedoc.md": docs__somedoc_md, - "disabled_model.sql": models__disabled_model_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return { - "my_seed.csv": seeds__my_seed_csv, - "disabled_seed.csv": seeds__disabled_seed_csv, - } - - @pytest.fixture(scope="class") - def snapshots(self): - return { - "snapshot_seed.sql": snapshot__snapshot_seed_sql, - "disabled_snapshot_seed.sql": snapshot__disabled_snapshot_seed_sql, - } - - @pytest.fixture(scope="class") - def tests(self): - return { - "just_my.sql": tests__just_my_sql, - "disabled_just_my.sql": tests__disabled_just_my_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "do_nothing.sql": macros__do_nothing_sql, - "dummy_test.sql": macros__dummy_test_sql, - "disabled_dummy_test.sql": macros__disabled_dummy_test_sql, - } - - @pytest.fixture(scope="class") - def analyses(self): - return { - "a.sql": analyses__a_sql, - "disabled_al.sql": analyses__disabled_a_sql, - } - - def test_project(self, project): - # This is mainly used to test changes to the test project in isolation from - # the other noise. - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - # model, snapshot, seed, singular test, generic test, analysis - assert len(manifest.nodes) == 8 - assert len(manifest.sources) == 1 - assert len(manifest.exposures) == 1 - assert len(manifest.metrics) == 4 - # disabled model, snapshot, seed, singular test, generic test, analysis, source, exposure, metric - assert len(manifest.disabled) == 9 - assert "macro.test.do_nothing" in manifest.macros - - # Use this method when generating a new manifest version for the first time. - # Once generated, we shouldn't need to re-generate or modify the manifest. - def generate_latest_manifest( - self, - project, - current_manifest_version, - ): - run_dbt(["parse"]) - source_path = os.path.join(project.project_root, "target/manifest.json") - state_path = os.path.join(project.test_data_dir, f"state/v{current_manifest_version}") - target_path = os.path.join(state_path, "manifest.json") - os.makedirs(state_path, exist_ok=True) - shutil.copyfile(source_path, target_path) - - # Use this method when generating a new run_results version for the first time. - # Once generated, we shouldn't need to re-generate or modify the manifest. - def generate_latest_run_results( - self, - project, - current_run_results_version, - ): - run_dbt(["run"]) - source_path = os.path.join(project.project_root, "target/run_results.json") - state_path = os.path.join(project.test_data_dir, f"results/v{current_run_results_version}") - target_path = os.path.join(state_path, "run_results.json") - os.makedirs(state_path, exist_ok=True) - shutil.copyfile(source_path, target_path) - - # The actual test method. Run `dbt list --select state:modified --state ...` - # once for each past manifest version. They all have the same content, but different - # schema/structure, only some of which are forward-compatible with the - # current WritableManifest class. - def compare_previous_state( - self, - project, - compare_manifest_version, - expect_pass, - num_results, - ): - state_path = os.path.join(project.test_data_dir, f"state/v{compare_manifest_version}") - cli_args = [ - "list", - "--resource-types", - "model", - "--select", - "state:modified", - "--state", - state_path, - ] - if expect_pass: - results = run_dbt(cli_args, expect_pass=expect_pass) - assert len(results) == num_results - else: - with pytest.raises(IncompatibleSchemaError): - run_dbt(cli_args, expect_pass=expect_pass) - - # The actual test method. Run `dbt retry --state ...` - # once for each past run_results version. They all have the same content, but different - # schema/structure, only some of which are forward-compatible with the - # current WritableManifest class. - def compare_previous_results( - self, - project, - compare_run_results_version, - expect_pass, - num_results, - ): - state_path = os.path.join(project.test_data_dir, f"results/v{compare_run_results_version}") - cli_args = [ - "retry", - "--state", - state_path, - ] - if expect_pass: - results = run_dbt(cli_args, expect_pass=expect_pass) - assert len(results) == num_results - else: - with pytest.raises(IncompatibleSchemaError): - run_dbt(cli_args, expect_pass=expect_pass) - - def test_compare_state_current(self, project): - current_manifest_schema_version = WritableManifest.dbt_schema_version.version - assert ( - current_manifest_schema_version == self.CURRENT_EXPECTED_MANIFEST_VERSION - ), "Sounds like you've bumped the manifest version and need to update this test!" - # If we need a newly generated manifest, uncomment the following line and commit the result - # self.generate_latest_manifest(project, current_manifest_schema_version) - self.compare_previous_state(project, current_manifest_schema_version, True, 0) - - def test_backwards_compatible_versions(self, project): - # manifest schema version 4 and greater should always be forward compatible - for schema_version in range(4, 10): - self.compare_previous_state(project, schema_version, True, 1) - for schema_version in range(10, self.CURRENT_EXPECTED_MANIFEST_VERSION): - self.compare_previous_state(project, schema_version, True, 0) - - def test_nonbackwards_compatible_versions(self, project): - # schema versions 1, 2, 3 are all not forward compatible - for schema_version in range(1, 4): - self.compare_previous_state(project, schema_version, False, 0) - - def test_get_manifest_schema_version(self, project): - for schema_version in range(1, self.CURRENT_EXPECTED_MANIFEST_VERSION): - manifest_path = os.path.join( - project.test_data_dir, f"state/v{schema_version}/manifest.json" - ) - manifest = json.load(open(manifest_path)) - - manifest_version = get_artifact_schema_version(manifest) - assert manifest_version == schema_version - - def test_compare_results_current(self, project): - current_run_results_schema_version = RunResultsArtifact.dbt_schema_version.version - assert ( - current_run_results_schema_version == self.CURRENT_EXPECTED_RUN_RESULTS_VERSION - ), "Sounds like you've bumped the run_results version and need to update this test!" - # If we need a newly generated run_results, uncomment the following line and commit the result - # self.generate_latest_run_results(project, current_run_results_schema_version) - self.compare_previous_results(project, current_run_results_schema_version, True, 0) - - def test_backwards_compatible_run_results_versions(self, project): - # run_results schema version 4 and greater should always be forward compatible - for schema_version in range(4, self.CURRENT_EXPECTED_RUN_RESULTS_VERSION): - self.compare_previous_results(project, schema_version, True, 0) diff --git a/tests/functional/artifacts/test_run_results.py b/tests/functional/artifacts/test_run_results.py deleted file mode 100644 index d8c30a2b7..000000000 --- a/tests/functional/artifacts/test_run_results.py +++ /dev/null @@ -1,74 +0,0 @@ -import json -from multiprocessing import Process -from pathlib import Path - -import pytest - -from tests.functional.utils import run_dbt - -good_model_sql = """ -select 1 as id -""" - - -bad_model_sql = """ -something bad -""" - - -slow_model_sql = """ -{{ config(materialized='table') }} -select id from {{ ref('good_model') }}, pg_sleep(5) -""" - - -class TestRunResultsTimingSuccess: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": good_model_sql} - - def test_timing_exists(self, project): - results = run_dbt(["run"]) - assert len(results.results) == 1 - assert len(results.results[0].timing) > 0 - - -class TestRunResultsTimingFailure: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": bad_model_sql} - - def test_timing_exists(self, project): - results = run_dbt(["run"], expect_pass=False) - assert len(results.results) == 1 - assert len(results.results[0].timing) > 0 - - -# This test is failing due to the faulty assumptions that run_results.json would -# be written multiple times. Temporarily disabling. -@pytest.mark.skip() -class TestRunResultsWritesFileOnSignal: - @pytest.fixture(scope="class") - def models(self): - return {"good_model.sql": good_model_sql, "slow_model.sql": slow_model_sql} - - def test_run_results_are_written_on_signal(self, project): - # Start the runner in a seperate process. - external_process_dbt = Process( - target=run_dbt, args=([["run"]]), kwargs={"expect_pass": False} - ) - external_process_dbt.start() - assert external_process_dbt.is_alive() - - # Wait until the first file write, then kill the process. - run_results_file = Path(project.project_root) / "target/run_results.json" - while run_results_file.is_file() is False: - pass - external_process_dbt.terminate() - - # Wait until the process is dead, then check the file that there is only one result. - while external_process_dbt.is_alive() is True: - pass - with run_results_file.open() as run_results_str: - run_results = json.loads(run_results_str.read()) - assert len(run_results["results"]) == 1 diff --git a/tests/functional/build_command/fixtures.py b/tests/functional/build_command/fixtures.py deleted file mode 100644 index 7f6b17d48..000000000 --- a/tests/functional/build_command/fixtures.py +++ /dev/null @@ -1,302 +0,0 @@ -seeds__country_csv = """iso3,name,iso2,iso_numeric,cow_alpha,cow_numeric,fao_code,un_code,wb_code,imf_code,fips,geonames_name,geonames_id,r_name,aiddata_name,aiddata_code,oecd_name,oecd_code,historical_name,historical_iso3,historical_iso2,historical_iso_numeric -ABW,Aruba,AW,533,,,,533,ABW,314,AA,Aruba,3577279,ARUBA,Aruba,12,Aruba,373,,,, -AFG,Afghanistan,AF,4,AFG,700,2,4,AFG,512,AF,Afghanistan,1149361,AFGHANISTAN,Afghanistan,1,Afghanistan,625,,,, -AGO,Angola,AO,24,ANG,540,7,24,AGO,614,AO,Angola,3351879,ANGOLA,Angola,7,Angola,225,,,, -AIA,Anguilla,AI,660,,,,660,AIA,312,AV,Anguilla,3573511,ANGUILLA,Anguilla,8,Anguilla,376,,,, -ALA,Aland Islands,AX,248,,,,248,ALA,,,Aland Islands,661882,ALAND ISLANDS,,,,,,,, -ALB,Albania,AL,8,ALB,339,3,8,ALB,914,AL,Albania,783754,ALBANIA,Albania,3,Albania,71,,,, -AND,Andorra,AD,20,AND,232,6,20,ADO,,AN,Andorra,3041565,ANDORRA,,,,,,,, -ANT,Netherlands Antilles,AN,530,,,,,ANT,353,NT,Netherlands Antilles,,NETHERLANDS ANTILLES,Netherlands Antilles,211,Netherlands Antilles,361,Netherlands Antilles,ANT,AN,530 -ARE,United Arab Emirates,AE,784,UAE,696,225,784,ARE,466,AE,United Arab Emirates,290557,UNITED ARAB EMIRATES,United Arab Emirates,140,United Arab Emirates,576,,,, -""" - -snapshots__snap_0 = """ -{% snapshot snap_0 %} - -{{ - config( - target_database=database, - target_schema=schema, - unique_key='iso3', - - strategy='timestamp', - updated_at='snap_0_updated_at', - ) -}} - -select *, current_timestamp as snap_0_updated_at from {{ ref('model_0') }} - -{% endsnapshot %} -""" - -snapshots__snap_1 = """ -{% snapshot snap_1 %} - -{{ - config( - target_database=database, - target_schema=schema, - unique_key='iso3', - - strategy='timestamp', - updated_at='snap_1_updated_at', - ) -}} - -SELECT - iso3, - name, - iso2, - iso_numeric, - cow_alpha, - cow_numeric, - fao_code, - un_code, - wb_code, - imf_code, - fips, - geonames_name, - geonames_id, - r_name, - aiddata_name, - aiddata_code, - oecd_name, - oecd_code, - historical_name, - historical_iso3, - historical_iso2, - historical_iso_numeric, - current_timestamp as snap_1_updated_at from {{ ref('model_1') }} - -{% endsnapshot %} -""" - -snapshots__snap_99 = """ -{% snapshot snap_99 %} - -{{ - config( - target_database=database, - target_schema=schema, - strategy='timestamp', - unique_key='num', - updated_at='snap_99_updated_at', - ) -}} - -select *, current_timestamp as snap_99_updated_at from {{ ref('model_99') }} - -{% endsnapshot %} -""" - -models__model_0_sql = """ -{{ config(materialized='table') }} - -select * from {{ ref('countries') }} -""" - -models__model_1_sql = """ -{{ config(materialized='table') }} - -select * from {{ ref('snap_0') }} -""" - -models__model_2_sql = """ -{{ config(materialized='table') }} - -select * from {{ ref('snap_1') }} -""" - -models__model_3_sql = """ -{{ config(materialized='table') }} - -select * from {{ ref('model_1') }} -""" - -models__model_99_sql = """ -{{ config(materialized='table') }} - -select '1' as "num" -""" - -models__test_yml = """ -version: 2 - -models: - - name: model_0 - columns: - - name: iso3 - data_tests: - - unique - - not_null - - name: model_2 - columns: - - name: iso3 - data_tests: - - unique - - not_null -""" - -unit_tests__yml = """ -unit_tests: - - name: ut_model_3 - model: model_3 - given: - - input: ref('model_1') - rows: - - {iso3: ABW, name: Aruba} - expect: - rows: - - {iso3: ABW, name: Aruba} -""" - -models_failing_tests__tests_yml = """ -version: 2 - -models: - - name: model_0 - columns: - - name: iso3 - data_tests: - - unique - - not_null - - name: historical_iso_numeric - data_tests: - - not_null - - name: model_2 - columns: - - name: iso3 - data_tests: - - unique - - not_null -""" - -models_failing__model_1_sql = """ -{{ config(materialized='table') }} - -select bad_column from {{ ref('snap_0') }} -""" - - -models_circular_relationship__test_yml = """ -version: 2 - -models: - - name: model_0 - columns: - - name: iso3 - data_tests: - - relationships: - to: ref('model_1') - field: iso3 - - - name: model_1 - columns: - - name: iso3 - data_tests: - - relationships: - to: ref('model_0') - field: iso3 - -""" - -models_simple_blocking__model_a_sql = """ -select null as id -""" - -models_simple_blocking__model_b_sql = """ -select * from {{ ref('model_a') }} -""" - -models_simple_blocking__test_yml = """ -version: 2 - -models: - - name: model_a - columns: - - name: id - data_tests: - - not_null -""" - -models_triple_blocking__test_yml = """ -version: 2 - -models: - - name: model_a - columns: - - name: id - data_tests: - - not_null - - name: model_b - columns: - - name: id - data_tests: - - not_null - - name: model_c - columns: - - name: id - data_tests: - - not_null -""" - -models_interdependent__model_a_sql = """ -select 1 as id -""" - -models_interdependent__model_b_sql = """ -select * from {{ ref('model_a') }} -""" - -models_interdependent__model_b_null_sql = """ -select null from {{ ref('model_a') }} -""" - - -models_interdependent__model_c_sql = """ -select * from {{ ref('model_b') }} -""" - -models_interdependent__test_yml = """ -version: 2 - -models: - - name: model_a - columns: - - name: id - data_tests: - - unique - - not_null - - relationships: - to: ref('model_b') - field: id - - relationships: - to: ref('model_c') - field: id - - - name: model_b - columns: - - name: id - data_tests: - - unique - - not_null - - relationships: - to: ref('model_a') - field: id - - relationships: - to: ref('model_c') - field: id - - - name: model_c - columns: - - name: id - data_tests: - - unique - - not_null - - relationships: - to: ref('model_a') - field: id - - relationships: - to: ref('model_b') - field: id -""" diff --git a/tests/functional/build_command/test_build.py b/tests/functional/build_command/test_build.py deleted file mode 100644 index 2bf65274d..000000000 --- a/tests/functional/build_command/test_build.py +++ /dev/null @@ -1,211 +0,0 @@ -import pytest - -from tests.functional.build_command import fixtures -from tests.functional.utils import run_dbt - - -class TestBuildBase: - @pytest.fixture(scope="class") - def seeds(self): - return {"countries.csv": fixtures.seeds__country_csv} - - @pytest.fixture(scope="class") - def snapshots(self): - return { - "snap_0.sql": fixtures.snapshots__snap_0, - "snap_1.sql": fixtures.snapshots__snap_1, - "snap_99.sql": fixtures.snapshots__snap_99, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": { - "quote_columns": False, - }, - } - - -class TestPassingBuild(TestBuildBase): - @pytest.fixture(scope="class") - def models(self): - return { - "model_0.sql": fixtures.models__model_0_sql, - "model_1.sql": fixtures.models__model_1_sql, - "model_2.sql": fixtures.models__model_2_sql, - "model_3.sql": fixtures.models__model_3_sql, - "model_99.sql": fixtures.models__model_99_sql, - "test.yml": fixtures.models__test_yml + fixtures.unit_tests__yml, - } - - def test_build_happy_path(self, project): - run_dbt(["build"]) - - -class TestFailingBuild(TestBuildBase): - @pytest.fixture(scope="class") - def models(self): - return { - "model_0.sql": fixtures.models__model_0_sql, - "model_1.sql": fixtures.models_failing__model_1_sql, - "model_2.sql": fixtures.models__model_2_sql, - "model_3.sql": fixtures.models__model_3_sql, - "model_99.sql": fixtures.models__model_99_sql, - "test.yml": fixtures.models__test_yml + fixtures.unit_tests__yml, - } - - def test_failing_test_skips_downstream(self, project): - results = run_dbt(["build"], expect_pass=False) - assert len(results) == 14 - actual = [str(r.status) for r in results] - expected = ["error"] * 1 + ["skipped"] * 6 + ["pass"] * 2 + ["success"] * 5 - - assert sorted(actual) == sorted(expected) - - -class TestFailingTestsBuild(TestBuildBase): - @pytest.fixture(scope="class") - def models(self): - return { - "model_0.sql": fixtures.models__model_0_sql, - "model_1.sql": fixtures.models__model_1_sql, - "model_2.sql": fixtures.models__model_2_sql, - "model_99.sql": fixtures.models__model_99_sql, - "test.yml": fixtures.models_failing_tests__tests_yml, - } - - def test_failing_test_skips_downstream(self, project): - results = run_dbt(["build"], expect_pass=False) - assert len(results) == 13 - actual = [str(r.status) for r in results] - expected = ["fail"] + ["skipped"] * 6 + ["pass"] * 2 + ["success"] * 4 - assert sorted(actual) == sorted(expected) - - -class TestCircularRelationshipTestsBuild(TestBuildBase): - @pytest.fixture(scope="class") - def models(self): - return { - "model_0.sql": fixtures.models__model_0_sql, - "model_1.sql": fixtures.models__model_1_sql, - "model_99.sql": fixtures.models__model_99_sql, - "test.yml": fixtures.models_circular_relationship__test_yml, - } - - def test_circular_relationship_test_success(self, project): - """Ensure that tests that refer to each other's model don't create - a circular dependency.""" - results = run_dbt(["build"]) - actual = [str(r.status) for r in results] - expected = ["success"] * 7 + ["pass"] * 2 - - assert sorted(actual) == sorted(expected) - - -class TestSimpleBlockingTest: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": fixtures.models_simple_blocking__model_a_sql, - "model_b.sql": fixtures.models_simple_blocking__model_b_sql, - "test.yml": fixtures.models_simple_blocking__test_yml, - } - - def test_simple_blocking_test(self, project): - """Ensure that a failed test on model_a always blocks model_b""" - results = run_dbt(["build"], expect_pass=False) - actual = [r.status for r in results] - expected = ["success", "fail", "skipped"] - assert sorted(actual) == sorted(expected) - - -class TestInterdependentModels: - @pytest.fixture(scope="class") - def seeds(self): - return {"countries.csv": fixtures.seeds__country_csv} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": { - "quote_columns": False, - }, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": fixtures.models_interdependent__model_a_sql, - "model_b.sql": fixtures.models_interdependent__model_b_sql, - "model_c.sql": fixtures.models_interdependent__model_c_sql, - "test.yml": fixtures.models_interdependent__test_yml, - } - - def test_interdependent_models(self, project): - results = run_dbt(["build"]) - assert len(results) == 16 - - -class TestInterdependentModelsFail: - @pytest.fixture(scope="class") - def seeds(self): - return {"countries.csv": fixtures.seeds__country_csv} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": { - "quote_columns": False, - }, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": fixtures.models_interdependent__model_a_sql, - "model_b.sql": fixtures.models_interdependent__model_b_null_sql, - "model_c.sql": fixtures.models_interdependent__model_c_sql, - "test.yml": fixtures.models_interdependent__test_yml, - } - - def test_interdependent_models_fail(self, project): - results = run_dbt(["build"], expect_pass=False) - assert len(results) == 16 - - actual = [str(r.status) for r in results] - expected = ["error"] * 4 + ["skipped"] * 7 + ["pass"] * 2 + ["success"] * 3 - assert sorted(actual) == sorted(expected) - - -class TestDownstreamSelection: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": fixtures.models_simple_blocking__model_a_sql, - "model_b.sql": fixtures.models_simple_blocking__model_b_sql, - "test.yml": fixtures.models_simple_blocking__test_yml, - } - - def test_downstream_selection(self, project): - """Ensure that selecting test+ does not select model_a's other children""" - # fails with "Got 1 result, configured to fail if != 0" - # model_a is defined as select null as id - results = run_dbt(["build", "--select", "model_a not_null_model_a_id+"], expect_pass=False) - assert len(results) == 2 - - -class TestLimitedUpstreamSelection: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": fixtures.models_interdependent__model_a_sql, - "model_b.sql": fixtures.models_interdependent__model_b_sql, - "model_c.sql": fixtures.models_interdependent__model_c_sql, - "test.yml": fixtures.models_triple_blocking__test_yml, - } - - def test_limited_upstream_selection(self, project): - """Ensure that selecting 1+model_c only selects up to model_b (+ tests of both)""" - # Fails with "relation "test17005969872609282880_test_build.model_a" does not exist" - results = run_dbt(["build", "--select", "1+model_c"], expect_pass=False) - assert len(results) == 4 diff --git a/tests/functional/cli/test_cli_exit_codes.py b/tests/functional/cli/test_cli_exit_codes.py deleted file mode 100644 index 87daa36f2..000000000 --- a/tests/functional/cli/test_cli_exit_codes.py +++ /dev/null @@ -1,37 +0,0 @@ -from dbt.cli.exceptions import ResultExit -from dbt.cli.main import cli -import pytest - - -good_sql = """ -select 1 as fun -""" - -bad_sql = """ -someting bad -""" - - -class CliRunnerBase: - def run_cli(self): - ctx = cli.make_context(cli.name, ["run"]) - return cli.invoke(ctx) - - -class TestExitCodeZero(CliRunnerBase): - @pytest.fixture(scope="class") - def models(self): - return {"model_one.sql": good_sql} - - def test_no_exc_thrown(self, project): - self.run_cli() - - -class TestExitCodeOne(CliRunnerBase): - @pytest.fixture(scope="class") - def models(self): - return {"model_one.sql": bad_sql} - - def test_exc_thrown(self, project): - with pytest.raises(ResultExit): - self.run_cli() diff --git a/tests/functional/cli/test_env_var_deprecations.py b/tests/functional/cli/test_env_var_deprecations.py deleted file mode 100644 index 27293a36c..000000000 --- a/tests/functional/cli/test_env_var_deprecations.py +++ /dev/null @@ -1,60 +0,0 @@ -import os - -from dbt.tests.util import read_file, run_dbt -import pytest - - -model_one_sql = """ - select 1 as fun -""" - - -class TestDeprecatedEnvVars: - @pytest.fixture(scope="class") - def models(self): - return {"model_one.sql": model_one_sql} - - def test_defer(self, project, logs_dir): - self.assert_deprecated( - logs_dir, - "DBT_DEFER_TO_STATE", - "DBT_DEFER", - ) - - def test_favor_state(self, project, logs_dir): - self.assert_deprecated( - logs_dir, - "DBT_FAVOR_STATE_MODE", - "DBT_FAVOR_STATE", - command="build", - ) - - def test_print(self, project, logs_dir): - self.assert_deprecated( - logs_dir, - "DBT_NO_PRINT", - "DBT_PRINT", - ) - - def test_state(self, project, logs_dir): - self.assert_deprecated( - logs_dir, - "DBT_ARTIFACT_STATE_PATH", - "DBT_STATE", - old_val=".", - ) - - def assert_deprecated(self, logs_dir, old_env_var, new_env_var, command="run", old_val="0"): - os.environ[old_env_var] = old_val - run_dbt([command]) - - # replacing new lines with spaces accounts for text wrapping - log_file = read_file(logs_dir, "dbt.log").replace("\n", " ").replace("\\n", " ") - dep_str = f"The environment variable `{old_env_var}` has been renamed as `{new_env_var}`" - - try: - assert dep_str in log_file - except Exception as e: - del os.environ[old_env_var] - raise e - del os.environ[old_env_var] diff --git a/tests/functional/cli/test_error_handling.py b/tests/functional/cli/test_error_handling.py deleted file mode 100644 index c70d789cf..000000000 --- a/tests/functional/cli/test_error_handling.py +++ /dev/null @@ -1,20 +0,0 @@ -import pytest - -from tests.functional.utils import run_dbt - - -model_one_sql = """ -someting bad -""" - - -class TestHandledExit: - @pytest.fixture(scope="class") - def models(self): - return {"model_one.sql": model_one_sql} - - def test_failed_run_does_not_throw(self, project): - run_dbt(["run"], expect_pass=False) - - def test_fail_fast_failed_run_does_not_throw(self, project): - run_dbt(["--fail-fast", "run"], expect_pass=False) diff --git a/tests/functional/cli/test_multioption.py b/tests/functional/cli/test_multioption.py deleted file mode 100644 index 6246733fc..000000000 --- a/tests/functional/cli/test_multioption.py +++ /dev/null @@ -1,143 +0,0 @@ -import pytest - -from tests.functional.utils import run_dbt - - -model_one_sql = """ -select 1 as fun -""" - -schema_sql = """ -sources: - - name: my_source - description: "My source" - schema: test_schema - tables: - - name: my_table - - name: my_other_table - -exposures: - - name: weekly_jaffle_metrics - label: By the Week - type: dashboard - maturity: high - url: https://bi.tool/dashboards/1 - description: > - Did someone say "exponential growth"? - depends_on: - - ref('model_one') - owner: - name: dbt Labs - email: data@jaffleshop.com -""" - - -class TestResourceType: - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": schema_sql, "model_one.sql": model_one_sql} - - def test_resource_type_single(self, project): - result = run_dbt(["-q", "ls", "--resource-types", "model"]) - assert len(result) == 1 - assert result == ["test.model_one"] - - def test_resource_type_quoted(self, project): - result = run_dbt(["-q", "ls", "--resource-types", "model source"]) - assert len(result) == 3 - expected_result = { - "test.model_one", - "source:test.my_source.my_table", - "source:test.my_source.my_other_table", - } - assert set(result) == expected_result - - def test_resource_type_args(self, project): - result = run_dbt( - [ - "-q", - "ls", - "--resource-type", - "model", - "--resource-type", - "source", - "--resource-type", - "exposure", - ] - ) - assert len(result) == 4 - expected_result = { - "test.model_one", - "source:test.my_source.my_table", - "source:test.my_source.my_other_table", - "exposure:test.weekly_jaffle_metrics", - } - assert set(result) == expected_result - - -class TestOutputKeys: - @pytest.fixture(scope="class") - def models(self): - return {"model_one.sql": model_one_sql} - - def test_output_key_single(self, project): - result = run_dbt(["-q", "ls", "--output", "json", "--output-keys", "name"]) - assert len(result) == 1 - assert result == ['{"name": "model_one"}'] - - def test_output_key_quoted(self, project): - result = run_dbt(["-q", "ls", "--output", "json", "--output-keys", "name resource_type"]) - - assert len(result) == 1 - assert result == ['{"name": "model_one", "resource_type": "model"}'] - - def test_output_key_args(self, project): - result = run_dbt( - [ - "-q", - "ls", - "--output", - "json", - "--output-keys", - "name", - "--output-keys", - "resource_type", - ] - ) - - assert len(result) == 1 - assert result == ['{"name": "model_one", "resource_type": "model"}'] - - -class TestSelectExclude: - @pytest.fixture(scope="class") - def models(self): - return { - "model_one.sql": model_one_sql, - "model_two.sql": model_one_sql, - "model_three.sql": model_one_sql, - } - - def test_select_exclude_single(self, project): - result = run_dbt(["-q", "ls", "--select", "model_one"]) - assert len(result) == 1 - assert result == ["test.model_one"] - result = run_dbt(["-q", "ls", "--exclude", "model_one"]) - assert len(result) == 2 - assert "test.model_one" not in result - - def test_select_exclude_quoted(self, project): - result = run_dbt(["-q", "ls", "--select", "model_one model_two"]) - assert len(result) == 2 - assert "test.model_three" not in result - result = run_dbt(["-q", "ls", "--exclude", "model_one model_two"]) - assert len(result) == 1 - assert result == ["test.model_three"] - - def test_select_exclude_args(self, project): - result = run_dbt(["-q", "ls", "--select", "model_one", "--select", "model_two"]) - assert len(result) == 2 - assert "test.model_three" not in result - result = run_dbt(["-q", "ls", "--exclude", "model_one", "--exclude", "model_two"]) - assert len(result) == 1 - assert result == ["test.model_three"] diff --git a/tests/functional/cli/test_resolvers.py b/tests/functional/cli/test_resolvers.py deleted file mode 100644 index e809a4e1c..000000000 --- a/tests/functional/cli/test_resolvers.py +++ /dev/null @@ -1,36 +0,0 @@ -from pathlib import Path - -from dbt.cli.resolvers import default_log_path -import pytest - - -class TestDefaultLogPathNoProject: - def test_default_log_path_no_project(self): - expected_log_path = Path("logs") - actual_log_path = default_log_path("nonexistent_project_dir") - - assert actual_log_path == expected_log_path - - -class TestDefaultLogPathWithProject: - @pytest.fixture(scope="class") - def project_config_update(self): - return {"log-path": "test_default_log_path"} - - def test_default_log_path_with_project(self, project, project_config_update): - expected_log_path = Path(project.project_root) / "test_default_log_path" - actual_log_path = default_log_path(project.project_root) - - assert actual_log_path == expected_log_path - - -class TestDefaultLogPathWithProjectNoConfiguredLogPath: - @pytest.fixture(scope="class") - def project_config_update(self): - return {"log-path": None} - - def test_default_log_path_with_project(self, project, project_config_update): - expected_log_path = Path(project.project_root) / "logs" - actual_log_path = default_log_path(project.project_root) - - assert actual_log_path == expected_log_path diff --git a/tests/functional/configs/fixtures.py b/tests/functional/configs/fixtures.py deleted file mode 100644 index bb50393fc..000000000 --- a/tests/functional/configs/fixtures.py +++ /dev/null @@ -1,201 +0,0 @@ -# NOTE: these fixtures also get used in `/tests/functional/saved_queries/` -import pytest - -models__schema_yml = """ -version: 2 -sources: - - name: raw - database: "{{ target.database }}" - schema: "{{ target.schema }}" - tables: - - name: 'seed' - identifier: "{{ var('seed_name', 'invalid') }}" - columns: - - name: id - data_tests: - - unique: - enabled: "{{ var('enabled_direct', None) | as_native }}" - - accepted_values: - enabled: "{{ var('enabled_direct', None) | as_native }}" - severity: "{{ var('severity_direct', None) | as_native }}" - values: [1,2] - -models: - - name: model - columns: - - name: id - data_tests: - - unique - - accepted_values: - values: [1,2,3,4] - -""" - -models__untagged_sql = """ -{{ - config(materialized='table') -}} - -select id, value from {{ source('raw', 'seed') }} - -""" - -models__tagged__model_sql = """ -{{ - config( - materialized='view', - tags=['tag_two'], - ) -}} - -{{ - config( - materialized='table', - tags=['tag_three'], - ) -}} - -select 4 as id, 2 as value - -""" - -seeds__seed_csv = """id,value -4,2 -""" - -tests__failing_sql = """ - -select 1 as fun - -""" - -tests__sleeper_agent_sql = """ -{{ config( - enabled = var('enabled_direct', False), - severity = var('severity_direct', 'WARN') -) }} - -select 1 as fun - -""" - -my_model = """ -select 1 as user -""" - -my_model_2 = """ -select * from {{ ref('my_model') }} -""" - -my_model_3 = """ -select * from {{ ref('my_model_2') }} -""" - -my_model_2_disabled = """ -{{ config(enabled=false) }} -select * from {{ ref('my_model') }} -""" - -my_model_3_disabled = """ -{{ config(enabled=false) }} -select * from {{ ref('my_model_2') }} -""" - -my_model_2_enabled = """ -{{ config(enabled=true) }} -select * from {{ ref('my_model') }} -""" - -my_model_3_enabled = """ -{{ config(enabled=true) }} -select * from {{ ref('my_model') }} -""" - -schema_all_disabled_yml = """ -version: 2 -models: - - name: my_model - - name: my_model_2 - config: - enabled: false - - name: my_model_3 - config: - enabled: false -""" - -schema_explicit_enabled_yml = """ -version: 2 -models: - - name: my_model - - name: my_model_2 - config: - enabled: true - - name: my_model_3 - config: - enabled: true -""" - -schema_partial_disabled_yml = """ -version: 2 -models: - - name: my_model - - name: my_model_2 - config: - enabled: false - - name: my_model_3 -""" - -schema_partial_enabled_yml = """ -version: 2 -models: - - name: my_model - - name: my_model_2 - config: - enabled: True - - name: my_model_3 -""" - -schema_invalid_enabled_yml = """ -version: 2 -models: - - name: my_model - config: - enabled: True and False - - name: my_model_3 -""" - -simple_snapshot = """{% snapshot mysnapshot %} - - {{ - config( - target_schema='snapshots', - strategy='timestamp', - unique_key='id', - updated_at='updated_at' - ) - }} - - select * from dummy - -{% endsnapshot %}""" - - -class BaseConfigProject: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "untagged.sql": models__untagged_sql, - "tagged": {"model.sql": models__tagged__model_sql}, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed.csv": seeds__seed_csv} - - @pytest.fixture(scope="class") - def tests(self): - return { - "failing.sql": tests__failing_sql, - "sleeper_agent.sql": tests__sleeper_agent_sql, - } diff --git a/tests/functional/configs/test_configs.py b/tests/functional/configs/test_configs.py deleted file mode 100644 index d99b32c48..000000000 --- a/tests/functional/configs/test_configs.py +++ /dev/null @@ -1,141 +0,0 @@ -import os - -from dbt.exceptions import ParsingError -from dbt.tests.util import ( - check_relations_equal, - run_dbt, - update_config_file, - write_file, -) -from dbt_common.dataclass_schema import ValidationError -import pytest - -from tests.functional.configs.fixtures import ( - BaseConfigProject, - simple_snapshot, -) - - -class TestConfigs(BaseConfigProject): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "tagged": { - # the model configs will override this - "materialized": "invalid", - # the model configs will append to these - "tags": ["tag_one"], - } - }, - }, - "seeds": { - "quote_columns": False, - }, - } - - def test_config_layering( - self, - project, - ): - # run seed - results = run_dbt(["seed"]) - assert len(results) == 1 - - # test the project-level tag, and both config() call tags - assert len(run_dbt(["run", "--model", "tag:tag_one"])) == 1 - assert len(run_dbt(["run", "--model", "tag:tag_two"])) == 1 - assert len(run_dbt(["run", "--model", "tag:tag_three"])) == 1 - check_relations_equal(project.adapter, ["seed", "model"]) - - # make sure we overwrote the materialization properly - tables = project.get_tables_in_schema() - assert tables["model"] == "table" - - -# In addition to testing an alternative target-paths setting, it tests that -# the attribute is jinja rendered and that the context "modules" works. -class TestTargetConfigs(BaseConfigProject): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "target-path": "target_{{ modules.datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S') }}", - "seeds": { - "quote_columns": False, - }, - } - - def test_alternative_target_paths(self, project): - # chdir to a different directory to test creation of target directory under project_root - os.chdir(project.profiles_dir) - run_dbt(["seed"]) - - target_path = "" - for d in os.listdir(project.project_root): - if os.path.isdir(os.path.join(project.project_root, d)) and d.startswith("target_"): - target_path = d - assert os.path.exists(os.path.join(project.project_root, target_path, "manifest.json")) - - -class TestInvalidTestsMaterializationProj(object): - def test_tests_materialization_proj_config(self, project): - config_patch = {"data_tests": {"materialized": "table"}} - update_config_file(config_patch, project.project_root, "dbt_project.yml") - tests_dir = os.path.join(project.project_root, "tests") - write_file("select * from foo", tests_dir, "test.sql") - - with pytest.raises(ValidationError): - run_dbt() - - -class TestInvalidSeedsMaterializationProj(object): - def test_seeds_materialization_proj_config(self, project): - config_patch = {"seeds": {"materialized": "table"}} - update_config_file(config_patch, project.project_root, "dbt_project.yml") - - seeds_dir = os.path.join(project.project_root, "seeds") - write_file("id1, id2\n1, 2", seeds_dir, "seed.csv") - - with pytest.raises(ValidationError): - run_dbt() - - -class TestInvalidSeedsMaterializationSchema(object): - def test_seeds_materialization_schema_config(self, project): - seeds_dir = os.path.join(project.project_root, "seeds") - write_file( - "version: 2\nseeds:\n - name: myseed\n config:\n materialized: table", - seeds_dir, - "schema.yml", - ) - write_file("id1, id2\n1, 2", seeds_dir, "myseed.csv") - - with pytest.raises(ValidationError): - run_dbt() - - -class TestInvalidSnapshotsMaterializationProj(object): - def test_snapshots_materialization_proj_config(self, project): - config_patch = {"snapshots": {"materialized": "table"}} - update_config_file(config_patch, project.project_root, "dbt_project.yml") - - snapshots_dir = os.path.join(project.project_root, "snapshots") - write_file(simple_snapshot, snapshots_dir, "mysnapshot.sql") - - with pytest.raises(ParsingError): - run_dbt() - - -class TestInvalidSnapshotsMaterializationSchema(object): - def test_snapshots_materialization_schema_config(self, project): - snapshots_dir = os.path.join(project.project_root, "snapshots") - write_file( - "version: 2\nsnapshots:\n - name: mysnapshot\n config:\n materialized: table", - snapshots_dir, - "schema.yml", - ) - write_file(simple_snapshot, snapshots_dir, "mysnapshot.sql") - - with pytest.raises(ValidationError): - run_dbt() diff --git a/tests/functional/configs/test_configs_in_schema_files.py b/tests/functional/configs/test_configs_in_schema_files.py deleted file mode 100644 index c1f370491..000000000 --- a/tests/functional/configs/test_configs_in_schema_files.py +++ /dev/null @@ -1,256 +0,0 @@ -from dbt.exceptions import ParsingError -from dbt.tests.util import ( - check_relations_equal, - get_manifest, - run_dbt, - write_file, -) -from dbt_common.exceptions import CompilationError -import pytest - - -models_alt__schema_yml = """ -version: 2 -sources: - - name: raw - database: "{{ target.database }}" - schema: "{{ target.schema }}" - tables: - - name: 'some_seed' - columns: - - name: id - -models: - - name: model - description: "This is a model description" - config: - tags: ['tag_in_schema'] - meta: - owner: 'Julie Smith' - my_attr: "{{ var('my_var') }}" - materialized: view - - columns: - - name: id - data_tests: - - not_null: - meta: - owner: 'Simple Simon' - - unique: - config: - meta: - owner: 'John Doe' -""" - -models_alt__untagged_sql = """ -{{ - config(materialized='table') -}} - -select id, value from {{ source('raw', 'some_seed') }} -""" - -models_alt__tagged__model_sql = """ -{{ - config( - materialized='view', - tags=['tag_1_in_model'], - ) -}} - -{{ - config( - materialized='table', - tags=['tag_2_in_model'], - ) -}} - -select 4 as id, 2 as value -""" - -models_no_materialized__model_sql = """ -{{ - config( - tags=['tag_1_in_model'], - ) -}} - -{{ - config( - tags=['tag_2_in_model'], - ) -}} - -select 4 as id, 2 as value -""" - -seeds_alt__some_seed_csv = """id,value -4,2 -""" - -extra_alt__untagged_yml = """ -version: 2 - -models: - - name: untagged - description: "This is a model description" - meta: - owner: 'Somebody Else' - config: - meta: - owner: 'Julie Smith' -""" - -extra_alt__untagged2_yml = """ -version: 2 - -models: - - name: untagged - description: "This is a model description" - data_tests: - - not_null: - error_if: ">2" - config: - error_if: ">2" -""" - - -class TestSchemaFileConfigs: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_alt__schema_yml, - "untagged.sql": models_alt__untagged_sql, - "tagged": {"model.sql": models_alt__tagged__model_sql}, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"some_seed.csv": seeds_alt__some_seed_csv} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "+meta": { - "company": "NuMade", - }, - "test": { - "+meta": { - "project": "test", - }, - "tagged": { - "+meta": { - "team": "Core Team", - }, - "tags": ["tag_in_project"], - "model": { - "materialized": "table", - "+meta": { - "owner": "Julie Dent", - }, - }, - }, - }, - }, - "vars": { - "test": { - "my_var": "TESTING", - } - }, - "seeds": { - "quote_columns": False, - }, - } - - def test_config_layering( - self, - project, - ): - # run seed - assert len(run_dbt(["seed"])) == 1 - - # test the project-level tag, and both config() call tags - assert len(run_dbt(["run", "--model", "tag:tag_in_project"])) == 1 - assert len(run_dbt(["run", "--model", "tag:tag_1_in_model"])) == 1 - assert len(run_dbt(["run", "--model", "tag:tag_2_in_model"])) == 1 - assert len(run_dbt(["run", "--model", "tag:tag_in_schema"])) == 1 - - # Verify that model nodes have expected tags and meta - manifest = get_manifest(project.project_root) - model_id = "model.test.model" - model_node = manifest.nodes[model_id] - meta_expected = { - "company": "NuMade", - "project": "test", - "team": "Core Team", - "owner": "Julie Smith", - "my_attr": "TESTING", - } - assert model_node.meta == meta_expected - assert model_node.config.meta == meta_expected - model_tags = ["tag_1_in_model", "tag_2_in_model", "tag_in_project", "tag_in_schema"] - model_node_tags = model_node.tags.copy() - model_node_tags.sort() - assert model_node_tags == model_tags - model_node_config_tags = model_node.config.tags.copy() - model_node_config_tags.sort() - assert model_node_config_tags == model_tags - model_meta = { - "company": "NuMade", - "project": "test", - "team": "Core Team", - "owner": "Julie Smith", - "my_attr": "TESTING", - } - assert model_node.config.meta == model_meta - - # make sure we overwrote the materialization properly - tables = project.get_tables_in_schema() - assert tables["model"] == "table" - check_relations_equal(project.adapter, ["some_seed", "model"]) - - # Remove materialized config from model - write_file( - models_no_materialized__model_sql, - project.project_root, - "models", - "tagged", - "model.sql", - ) - results = run_dbt(["run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - model_node = manifest.nodes[model_id] - - assert model_node.config.materialized == "view" - model_unrendered_config = { - "materialized": "view", - "meta": {"my_attr": "TESTING", "owner": "Julie Smith"}, - "tags": ["tag_1_in_model", "tag_2_in_model"], - } - assert model_node.unrendered_config == model_unrendered_config - - # look for test meta - schema_file_id = model_node.patch_path - schema_file = manifest.files[schema_file_id] - tests = schema_file.get_tests("models", "model") - assert tests[0] in manifest.nodes - test = manifest.nodes[tests[0]] - expected_meta = {"owner": "Simple Simon"} - assert test.config.meta == expected_meta - test = manifest.nodes[tests[1]] - expected_meta = {"owner": "John Doe"} - assert test.config.meta == expected_meta - - # copy a schema file with multiple metas - # shutil.copyfile('extra-alt/untagged.yml', 'models-alt/untagged.yml') - write_file(extra_alt__untagged_yml, project.project_root, "models", "untagged.yml") - with pytest.raises(ParsingError): - run_dbt(["run"]) - - # copy a schema file with config key in top-level of test and in config dict - # shutil.copyfile('extra-alt/untagged2.yml', 'models-alt/untagged.yml') - write_file(extra_alt__untagged2_yml, project.project_root, "models", "untagged.yml") - with pytest.raises(CompilationError): - run_dbt(["run"]) diff --git a/tests/functional/configs/test_contract_configs.py b/tests/functional/configs/test_contract_configs.py deleted file mode 100644 index 4be935e95..000000000 --- a/tests/functional/configs/test_contract_configs.py +++ /dev/null @@ -1,527 +0,0 @@ -import os - -from dbt.exceptions import ParsingError -from dbt.tests.util import get_artifact, get_manifest, write_file -from dbt_common.exceptions import ValidationError -import pytest - -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -my_model_sql = """ -{{ - config( - materialized = "table" - ) -}} - -select - 'blue' as color, - 1 as id, - cast('2019-01-01' as date) as date_day -""" - -my_model_contract_sql = """ -{{ - config( - materialized = "table", - contract = {"enforced": true} - ) -}} - -select - 1 as id, - 'blue' as color, - cast('2019-01-01' as date) as date_day -""" - -my_model_contract_disabled_sql = """ -{{ - config( - materialized = "table", - contract = {"enforced": false} - ) -}} - -select - 1 as id, - 'blue' as color, - cast('2019-01-01' as date) as date_day -""" - -my_incremental_model_sql = """ -{{ - config( - materialized = "incremental" - ) -}} - -select - 1 as id, - 'blue' as color, - cast('2019-01-01' as date) as date_day -""" - -my_view_model_sql = """ -{{ - config( - materialized = "view" - ) -}} - -select - 1 as id, - 'blue' as color, - cast('2019-01-01' as date) as date_day -""" - -my_model_python_error = """ -import holidays, s3fs - - -def model(dbt, _): - dbt.config( - materialized="table", - packages=["holidays", "s3fs"], # how to import python libraries in dbt's context - ) - df = dbt.ref("my_model") - df_describe = df.describe() # basic statistics profiling - return df_describe -""" - -model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - contract: - enforced: true - columns: - - name: id - quote: true - data_type: integer - description: hello - constraints: - - type: not_null - - type: primary_key - - type: check - expression: (id > 0) - data_tests: - - unique - - name: color - data_type: string - - name: date_day - data_type: date -""" - -model_schema_alias_types_false_yml = """ -version: 2 -models: - - name: my_model - config: - contract: - enforced: true - alias_types: false - columns: - - name: id - quote: true - data_type: integer - description: hello - constraints: - - type: not_null - - type: primary_key - - type: check - expression: (id > 0) - data_tests: - - unique - - name: color - data_type: string - - name: date_day - data_type: date -""" - -model_schema_ignore_unsupported_yml = """ -version: 2 -models: - - name: my_model - config: - contract: - enforced: true - columns: - - name: id - quote: true - data_type: integer - description: hello - constraints: - - type: not_null - warn_unsupported: False - - type: primary_key - warn_unsupported: False - - type: check - warn_unsupported: False - expression: (id > 0) - data_tests: - - unique - - name: color - data_type: text - - name: date_day - data_type: date -""" - -model_schema_errors_yml = """ -version: 2 -models: - - name: my_model - config: - contract: - enforced: true - columns: - - name: id - data_type: integer - description: hello - constraints: - - type: not_null - - type: primary_key - - type: check - expression: (id > 0) - data_tests: - - unique - - name: color - data_type: text - - name: date_day - - name: python_model - config: - contract: - enforced: true - columns: - - name: id - data_type: integer - description: hello - constraints: - - type: not_null - - type: primary_key - - type: check - expression: (id > 0) - data_tests: - - unique - - name: color - data_type: text - - name: date_day - data_type: date -""" - -model_schema_blank_yml = """ -version: 2 -models: - - name: my_model - config: - contract: - enforced: true -""" - -model_schema_complete_datatypes_yml = """ -version: 2 -models: - - name: my_model - columns: - - name: id - quote: true - data_type: integer - description: hello - constraints: - - type: not_null - - type: primary_key - - type: check - expression: (id > 0) - data_tests: - - unique - - name: color - data_type: text - - name: date_day - data_type: date -""" - -model_schema_incomplete_datatypes_yml = """ -version: 2 -models: - - name: my_model - columns: - - name: id - quote: true - data_type: integer - description: hello - constraints: - - type: not_null - - type: primary_key - - type: check - expression: (id > 0) - data_tests: - - unique - - name: color - - name: date_day - data_type: date -""" - - -class TestModelLevelContractEnabledConfigs: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "constraints_schema.yml": model_schema_yml, - } - - def test__model_contract_true(self, project): - run_dbt(["run"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - model = manifest.nodes[model_id] - my_model_columns = model.columns - my_model_config = model.config - contract_actual_config = my_model_config.contract - - assert contract_actual_config.enforced is True - - expected_columns = "{'id': ColumnInfo(name='id', description='hello', meta={}, data_type='integer', constraints=[ColumnLevelConstraint(type=<ConstraintType.not_null: 'not_null'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True), ColumnLevelConstraint(type=<ConstraintType.primary_key: 'primary_key'>, name=None, expression=None, warn_unenforced=True, warn_unsupported=True), ColumnLevelConstraint(type=<ConstraintType.check: 'check'>, name=None, expression='(id > 0)', warn_unenforced=True, warn_unsupported=True)], quote=True, tags=[], _extra={}), 'color': ColumnInfo(name='color', description='', meta={}, data_type='string', constraints=[], quote=None, tags=[], _extra={}), 'date_day': ColumnInfo(name='date_day', description='', meta={}, data_type='date', constraints=[], quote=None, tags=[], _extra={})}" - - assert expected_columns == str(my_model_columns) - - # compiled fields aren't in the manifest above because it only has parsed fields - manifest_json = get_artifact(project.project_root, "target", "manifest.json") - compiled_code = manifest_json["nodes"][model_id]["compiled_code"] - cleaned_code = " ".join(compiled_code.split()) - assert ( - "select 'blue' as color, 1 as id, cast('2019-01-01' as date) as date_day" - == cleaned_code - ) - - # set alias_types to false (should fail to compile) - write_file( - model_schema_alias_types_false_yml, - project.project_root, - "models", - "constraints_schema.yml", - ) - run_dbt(["run"], expect_pass=False) - - -class TestProjectContractEnabledConfigs: - @pytest.fixture(scope="class") - def project_config_update(self): - return {"models": {"test": {"+contract": {"enforced": True}}}} - - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "constraints_schema.yml": model_schema_complete_datatypes_yml, - } - - def test_defined_column_type(self, project): - run_dbt(["run"], expect_pass=True) - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - my_model_config = manifest.nodes[model_id].config - contract_actual_config = my_model_config.contract - assert contract_actual_config.enforced is True - - -class TestProjectContractEnabledConfigsError: - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "+contract": { - "enforced": True, - }, - } - } - } - - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "constraints_schema.yml": model_schema_incomplete_datatypes_yml, - } - - def test_undefined_column_type(self, project): - _, log_output = run_dbt_and_capture(["run", "-s", "my_model"], expect_pass=False) - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - my_model_config = manifest.nodes[model_id].config - contract_actual_config = my_model_config.contract - - assert contract_actual_config.enforced is True - - expected_compile_error = "Please ensure that the column name and data_type are defined within the YAML configuration for the ['color'] column(s)." - - assert expected_compile_error in log_output - - -class TestModelContractEnabledConfigs: - @pytest.fixture(scope="class") - def models(self): - return {"my_model.sql": my_model_contract_sql, "constraints_schema.yml": model_schema_yml} - - def test__model_contract(self, project): - run_dbt(["run"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - my_model_config = manifest.nodes[model_id].config - contract_actual_config = my_model_config.contract - assert contract_actual_config.enforced is True - - -class TestModelContractEnabledConfigsMissingDataTypes: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_contract_sql, - "constraints_schema.yml": model_schema_incomplete_datatypes_yml, - } - - def test_undefined_column_type(self, project): - _, log_output = run_dbt_and_capture(["run", "-s", "my_model"], expect_pass=False) - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - my_model_config = manifest.nodes[model_id].config - contract_actual_config = my_model_config.contract - - assert contract_actual_config.enforced is True - - expected_compile_error = "Please ensure that the column name and data_type are defined within the YAML configuration for the ['color'] column(s)." - - assert expected_compile_error in log_output - - -class TestModelLevelContractDisabledConfigs: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_contract_disabled_sql, - "constraints_schema.yml": model_schema_yml, - } - - def test__model_contract_false(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - my_model_config = manifest.nodes[model_id].config - contract_actual_config = my_model_config.contract - - assert contract_actual_config.enforced is False - - -class TestModelLevelContractErrorMessages: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_incremental_model_sql, - "constraints_schema.yml": model_schema_yml, - } - - def test__config_errors(self, project): - with pytest.raises(ValidationError) as err_info: - run_dbt(["run"], expect_pass=False) - - exc_str = " ".join(str(err_info.value).split()) - expected_materialization_error = "Invalid value for on_schema_change: ignore. Models materialized as incremental with contracts enabled must set on_schema_change to 'append_new_columns' or 'fail'" - assert expected_materialization_error in str(exc_str) - - -class TestModelLevelConstraintsErrorMessages: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.py": my_model_python_error, - "constraints_schema.yml": model_schema_yml, - } - - def test__config_errors(self, project): - with pytest.raises(ParsingError) as err_info: - run_dbt(["run"], expect_pass=False) - - exc_str = " ".join(str(err_info.value).split()) - expected_materialization_error = "Language Error: Expected 'sql' but found 'python'" - assert expected_materialization_error in str(exc_str) - # This is a compile time error and we won't get here because the materialization check is parse time - expected_empty_data_type_error = "Columns with `data_type` Blank/Null not allowed on contracted models. Columns Blank/Null: ['date_day']" - assert expected_empty_data_type_error not in str(exc_str) - - -class TestModelLevelConstraintsWarningMessages: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_view_model_sql, - "constraints_schema.yml": model_schema_yml, - } - - def test__config_warning(self, project): - _, log_output = run_dbt_and_capture(["run"]) - - expected_materialization_warning = ( - "Constraint types are not supported for view materializations" - ) - assert expected_materialization_warning in str(log_output) - - # change to not show warnings, message should not be in logs - models_dir = os.path.join(project.project_root, "models") - write_file(model_schema_ignore_unsupported_yml, models_dir, "constraints_schema.yml") - _, log_output = run_dbt_and_capture(["run"]) - - expected_materialization_warning = ( - "Constraint types are not supported for view materializations" - ) - assert expected_materialization_warning not in str(log_output) - - -class TestSchemaContractEnabledConfigs: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "constraints_schema.yml": model_schema_blank_yml, - } - - def test__schema_error(self, project): - with pytest.raises(ParsingError) as err_info: - run_dbt(["parse"], expect_pass=False) - - exc_str = " ".join(str(err_info.value).split()) - schema_error_expected = "Constraints must be defined in a `yml` schema configuration file" - assert schema_error_expected in str(exc_str) - - -class TestPythonModelLevelContractErrorMessages: - @pytest.fixture(scope="class") - def models(self): - return { - "python_model.py": my_model_python_error, - "constraints_schema.yml": model_schema_errors_yml, - } - - def test__python_errors(self, project): - with pytest.raises(ParsingError) as err_info: - run_dbt(["parse"], expect_pass=False) - - exc_str = " ".join(str(err_info.value).split()) - expected_python_error = "Language Error: Expected 'sql' but found 'python'" - assert expected_python_error in exc_str - - -class TestModelContractMissingYAMLColumns: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_contract_sql, - } - - def test__missing_column_contract_error(self, project): - results = run_dbt(["run"], expect_pass=False) - expected_error = ( - "This model has an enforced contract, and its 'columns' specification is missing" - ) - assert expected_error in results[0].message diff --git a/tests/functional/configs/test_custom_node_colors_configs.py b/tests/functional/configs/test_custom_node_colors_configs.py deleted file mode 100644 index 8bd55bc5b..000000000 --- a/tests/functional/configs/test_custom_node_colors_configs.py +++ /dev/null @@ -1,341 +0,0 @@ -from dbt.tests.util import get_manifest, run_dbt -from dbt_common.dataclass_schema import ValidationError -import pytest - - -CUSTOM_NODE_COLOR_MODEL_LEVEL = "red" -CUSTOM_NODE_COLOR_SCHEMA_LEVEL = "blue" -CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT = "#121212" -CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER = "purple" -CUSTOM_NODE_COLOR_INVALID_HEX = '"#xxx111"' -CUSTOM_NODE_COLOR_INVALID_NAME = "notacolor" - -# F strings are a pain here so replacing XXX with the config above instead -models__custom_node_color__model_sql = """ -{{ config(materialized='view', docs={'node_color': 'XXX'}) }} - -select 1 as id - -""".replace( - "XXX", CUSTOM_NODE_COLOR_MODEL_LEVEL -) - -models__non_custom_node_color__model_sql = """ -{{ config(materialized='view') }} - -select 1 as id - -""" - -models__show_docs_false__model_sql = """ -{{ config(materialized='view', docs={"show": True}) }} - -select 1 as id -""" - -models__custom_node_color__schema_yml = """ -version: 2 - -models: - - name: custom_color_model - description: "This is a model description" - config: - docs: - node_color: {} -""".format( - CUSTOM_NODE_COLOR_SCHEMA_LEVEL -) - - -models__non_custom_node_color__schema_yml = """ -version: 2 - -models: - - name: non_custom_color_model - description: "This is a model description" - config: - docs: - node_color: {} - show: True -""".format( - CUSTOM_NODE_COLOR_SCHEMA_LEVEL -) - -# To check that incorect configs are raising errors -models__non_custom_node_color_invalid_config_docs__schema_yml = """ -version: 2 - -models: - - name: non_custom_node_color - description: "This is a model description" - config: - docs: - node_color: {} - show: True -""".format( - CUSTOM_NODE_COLOR_INVALID_HEX -) - -models__non_custom_node_color_invalid_docs__schema_yml = """ -version: 2 - -models: - - name: non_custom_node_color - description: "This is a model description" - docs: - node_color: {} - show: True -""".format( - CUSTOM_NODE_COLOR_INVALID_NAME -) - -models__custom_node_color_invalid_hex__model_sql = """ -{{ config(materialized='view', docs={"show": True, "node_color": XXX }) }} - -select 1 as id -""".replace( - "XXX", CUSTOM_NODE_COLOR_INVALID_HEX -) - - -class BaseCustomNodeColorModelvsProject: - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "+docs": {"node_color": CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT, "show": False}, - "subdirectory": { - "+docs": { - "node_color": CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER, - "show": True, - }, - }, - } - } - } - - -# validation that model level node_color configs supercede dbt_project.yml -class TestModelLevelProjectColorConfigs(BaseCustomNodeColorModelvsProject): - @pytest.fixture(scope="class") - def models(self): - return {"custom_color_model.sql": models__custom_node_color__model_sql} - - def test__model_override_project(self, project): - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.custom_color_model" - my_model_config = manifest.nodes[model_id].config - my_model_docs = manifest.nodes[model_id].docs - - node_color_actual_config = my_model_config["docs"].node_color - show_actual_config = my_model_config["docs"].show - node_color_actual_docs = my_model_docs.node_color - show_actual_docs = my_model_docs.show - - # check node_color config is in the right spots for each model - assert node_color_actual_config == CUSTOM_NODE_COLOR_MODEL_LEVEL - assert node_color_actual_docs == CUSTOM_NODE_COLOR_MODEL_LEVEL - assert not show_actual_config - assert not show_actual_docs - - -# validation that model level node_color configs supercede schema.yml -class TestModelLevelSchemaColorConfigs(BaseCustomNodeColorModelvsProject): - @pytest.fixture(scope="class") - def models(self): - return { - "custom_color_model.sql": models__custom_node_color__model_sql, - "custom_color_schema.yml": models__custom_node_color__schema_yml, - } - - def test__model_override_schema(self, project): - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.custom_color_model" - my_model_config = manifest.nodes[model_id].config - my_model_docs = manifest.nodes[model_id].docs - - node_color_actual_config = my_model_config["docs"].node_color - show_actual_config = my_model_config["docs"].show - node_color_actual_docs = my_model_docs.node_color - show_actual_docs = my_model_docs.show - - # check node_color config is in the right spots for each model - assert node_color_actual_config == CUSTOM_NODE_COLOR_MODEL_LEVEL - assert node_color_actual_docs == CUSTOM_NODE_COLOR_MODEL_LEVEL - assert not show_actual_config - assert not show_actual_docs - - -# validation that node_color configured on subdirectories in dbt_project.yml supercedes project root -class TestSubdirectoryColorConfigs(BaseCustomNodeColorModelvsProject): - @pytest.fixture(scope="class") - def models(self): - return { - "subdirectory": { - "non_custom_color_model_subdirectory.sql": models__non_custom_node_color__model_sql - } - } - - def test__project_folder_override_project_root(self, project): - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.non_custom_color_model_subdirectory" - my_model_config = manifest.nodes[model_id].config - my_model_docs = manifest.nodes[model_id].docs - - node_color_actual_config = my_model_config["docs"].node_color - show_actual_config = my_model_config["docs"].show - node_color_actual_docs = my_model_docs.node_color - show_actual_docs = my_model_docs.show - - # check node_color config is in the right spots for each model - assert node_color_actual_config == CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER - assert node_color_actual_docs == CUSTOM_NODE_COLOR_PROJECT_LEVEL_FOLDER - # in this case show should be True since the dbt_project.yml overrides the root setting for /subdirectory - assert show_actual_config - assert show_actual_docs - - -# validation that node_color configured in schema.yml supercedes dbt_project.yml -class TestSchemaOverProjectColorConfigs(BaseCustomNodeColorModelvsProject): - @pytest.fixture(scope="class") - def models(self): - return { - "non_custom_color_model.sql": models__non_custom_node_color__model_sql, - "non_custom_color_schema.yml": models__non_custom_node_color__schema_yml, - } - - def test__schema_override_project( - self, - project, - ): - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - - model_id = "model.test.non_custom_color_model" - my_model_config = manifest.nodes[model_id].config - my_model_docs = manifest.nodes[model_id].docs - - node_color_actual_config = my_model_config["docs"].node_color - show_actual_config = my_model_config["docs"].show - node_color_actual_docs = my_model_docs.node_color - show_actual_docs = my_model_docs.show - - # check node_color config is in the right spots for each model - assert node_color_actual_config == CUSTOM_NODE_COLOR_SCHEMA_LEVEL - assert node_color_actual_docs == CUSTOM_NODE_COLOR_SCHEMA_LEVEL - # in this case show should be True since the schema.yml overrides the dbt_project.yml - assert show_actual_config - assert show_actual_docs - - -# validation that docs: show configured in model file supercedes dbt_project.yml -class TestModelOverProjectColorConfigs(BaseCustomNodeColorModelvsProject): - @pytest.fixture(scope="class") - def models(self): - return {"show_docs_override_model.sql": models__show_docs_false__model_sql} - - def test__model_show_overrides_dbt_project( - self, - project, - ): - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - - model_id = "model.test.show_docs_override_model" - my_model_config = manifest.nodes[model_id].config - my_model_docs = manifest.nodes[model_id].docs - - node_color_actual_config = my_model_config["docs"].node_color - show_actual_config = my_model_config["docs"].show - node_color_actual_docs = my_model_docs.node_color - show_actual_docs = my_model_docs.show - - # check node_color config is in the right spots for each model - assert node_color_actual_config == CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT - assert node_color_actual_docs == CUSTOM_NODE_COLOR_PROJECT_LEVEL_ROOT - # in this case show should be True since the schema.yml overrides the dbt_project.yml - assert show_actual_config - assert show_actual_docs - - -# validation that an incorrect color in dbt_project.yml raises an exception -class TestCustomNodeColorIncorrectColorProject: - @pytest.fixture(scope="class") - def models(self): # noqa: F811 - return {"non_custom_node_color.sql": models__non_custom_node_color__model_sql} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": {"+docs": {"node_color": CUSTOM_NODE_COLOR_INVALID_NAME, "show": False}} - } - } - - def test__invalid_color_project( - self, - project, - ): - with pytest.raises(ValidationError): - run_dbt(["compile"]) - - -# validation that an incorrect color in the config block raises an exception -class TestCustomNodeColorIncorrectColorModelConfig: - @pytest.fixture(scope="class") - def models(self): - return { - "custom_node_color_invalid_hex.sql": models__custom_node_color_invalid_hex__model_sql - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return {"models": {"+docs": {"node_color": "blue", "show": False}}} - - def test__invalid_color_config_block( - self, - project, - ): - with pytest.raises(ValidationError): - run_dbt(["compile"]) - - -# validation that an incorrect color in the YML file raises an exception -class TestCustomNodeColorIncorrectColorNameYMLConfig: - @pytest.fixture(scope="class") - def models(self): - return { - "non_custom_node_color.sql": models__non_custom_node_color__model_sql, - "invalid_custom_color.yml": models__non_custom_node_color_invalid_docs__schema_yml, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return {"models": {"+docs": {"node_color": "blue", "show": False}}} - - def test__invalid_color_docs_not_under_config( - self, - project, - ): - with pytest.raises(ValidationError): - run_dbt(["compile"]) - - -class TestCustomNodeColorIncorrectColorHEXYMLConfig: - @pytest.fixture(scope="class") - def models(self): - return { - "non_custom_node_color.sql": models__non_custom_node_color__model_sql, - "invalid_custom_color.yml": models__non_custom_node_color_invalid_config_docs__schema_yml, - } - - def test__invalid_color_docs_under_config( - self, - project, - ): - with pytest.raises(ValidationError): - run_dbt(["compile"]) diff --git a/tests/functional/configs/test_disabled_configs.py b/tests/functional/configs/test_disabled_configs.py deleted file mode 100644 index a8af7d461..000000000 --- a/tests/functional/configs/test_disabled_configs.py +++ /dev/null @@ -1,90 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.configs.fixtures import BaseConfigProject - - -class TestDisabledConfigs(BaseConfigProject): - @pytest.fixture(scope="class") - def dbt_profile_data(self, unique_schema): - return { - "test": { - "outputs": { - "default": { - "type": "postgres", - # make sure you can do this and get an int out - "threads": "{{ (1 + 3) | as_number }}", - "host": "localhost", - "port": "{{ (5400 + 32) | as_number }}", - "user": "root", - "pass": "password", - "dbname": "dbt", - "schema": unique_schema, - }, - "disabled": { - "type": "postgres", - # make sure you can do this and get an int out - "threads": "{{ (1 + 3) | as_number }}", - "host": "localhost", - "port": "{{ (5400 + 32) | as_number }}", - "user": "root", - "pass": "password", - "dbname": "dbt", - "schema": unique_schema, - }, - }, - "target": "default", - }, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "enabled": "{{ (target.name == 'default' | as_bool) }}", - }, - }, - # set the `var` result in schema.yml to be 'seed', so that the - # `source` call can suceed. - "vars": { - "test": { - "seed_name": "seed", - } - }, - "seeds": { - "quote_columns": False, - "test": { - "seed": { - "enabled": "{{ (target.name == 'default') | as_bool }}", - }, - }, - }, - "data_tests": { - "test": { - "enabled": "{{ (target.name == 'default') | as_bool }}", - "severity": "WARN", - }, - }, - } - - def test_disable_seed_partial_parse(self, project): - run_dbt(["--partial-parse", "seed", "--target", "disabled"]) - run_dbt(["--partial-parse", "seed", "--target", "disabled"]) - - def test_conditional_model(self, project): - # no seeds/models - enabled should eval to False because of the target - results = run_dbt(["seed", "--target", "disabled"]) - assert len(results) == 0 - results = run_dbt(["run", "--target", "disabled"]) - assert len(results) == 0 - results = run_dbt(["test", "--target", "disabled"]) - assert len(results) == 0 - - # has seeds/models - enabled should eval to True because of the target - results = run_dbt(["seed"]) - assert len(results) == 1 - results = run_dbt(["run"]) - assert len(results) == 2 - results = run_dbt(["test"]) - assert len(results) == 5 diff --git a/tests/functional/configs/test_disabled_model.py b/tests/functional/configs/test_disabled_model.py deleted file mode 100644 index 8355d9bf9..000000000 --- a/tests/functional/configs/test_disabled_model.py +++ /dev/null @@ -1,390 +0,0 @@ -from dbt.exceptions import ParsingError -from dbt.tests.util import get_manifest, run_dbt -from dbt_common.dataclass_schema import ValidationError -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.configs import fixtures - - -# ensure double disabled doesn't throw error when set at schema level -class TestSchemaDisabledConfigs: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_all_disabled_yml, - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - } - - def test_disabled_config(self, project): - run_dbt(["parse"]) - - -# ensure this throws a specific error that the model is disabled -class TestSchemaDisabledConfigsFailure: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_partial_disabled_yml, - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - } - - def test_disabled_config(self, project): - with pytest.raises(CompilationError) as exc: - run_dbt(["parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "which is disabled" - assert expected_msg in exc_str - - -# ensure double disabled doesn't throw error when set in model configs -class TestModelDisabledConfigs: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2_disabled, - "my_model_3.sql": fixtures.my_model_3_disabled, - } - - def test_disabled_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" not in manifest.nodes - assert "model.test.my_model_3" not in manifest.nodes - - assert "model.test.my_model_2" in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - -# ensure config set in project.yml can be overridden in yaml file -class TestOverrideProjectConfigsInYaml: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_partial_enabled_yml, - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "my_model_2": { - "enabled": False, - }, - "my_model_3": { - "enabled": False, - }, - }, - } - } - - def test_override_project_yaml_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" not in manifest.nodes - - assert "model.test.my_model_2" not in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - -# ensure config set in project.yml can be overridden in sql file -class TestOverrideProjectConfigsInSQL: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2_enabled, - "my_model_3.sql": fixtures.my_model_3, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "my_model_2": { - "enabled": False, - }, - "my_model_3": { - "enabled": False, - }, - }, - } - } - - def test_override_project_sql_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" not in manifest.nodes - - assert "model.test.my_model_2" not in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - -# ensure false config set in yaml file can be overridden in sql file -class TestOverrideFalseYAMLConfigsInSQL: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_all_disabled_yml, - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2_enabled, - "my_model_3.sql": fixtures.my_model_3, - } - - def test_override_yaml_sql_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" not in manifest.nodes - - assert "model.test.my_model_2" not in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - -# ensure true config set in yaml file can be overridden by false in sql file -class TestOverrideTrueYAMLConfigsInSQL: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_explicit_enabled_yml, - "my_model.sql": fixtures.my_model, - "my_model_2.sql": fixtures.my_model_2_enabled, - "my_model_3.sql": fixtures.my_model_3_disabled, - } - - def test_override_yaml_sql_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" not in manifest.nodes - - assert "model.test.my_model_2" not in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - -# ensure error when enabling in schema file when multiple nodes exist within disabled -class TestMultipleDisabledNodesForUniqueIDFailure: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_partial_enabled_yml, - "my_model.sql": fixtures.my_model, - "folder_1": { - "my_model_2.sql": fixtures.my_model_2_disabled, - "my_model_3.sql": fixtures.my_model_3_disabled, - }, - "folder_2": { - "my_model_2.sql": fixtures.my_model_2_disabled, - "my_model_3.sql": fixtures.my_model_3_disabled, - }, - "folder_3": { - "my_model_2.sql": fixtures.my_model_2_disabled, - "my_model_3.sql": fixtures.my_model_3_disabled, - }, - } - - def test_disabled_config(self, project): - with pytest.raises(ParsingError) as exc: - run_dbt(["parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "Found 3 matching disabled nodes for model 'my_model_2'" - assert expected_msg in exc_str - - -# ensure error when enabling in schema file when multiple nodes exist within disabled -class TestMultipleDisabledNodesSuccess: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": fixtures.my_model, - "folder_1": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - }, - "folder_2": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - }, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "folder_1": { - "enabled": False, - }, - "folder_2": { - "enabled": True, - }, - }, - } - } - - def test_multiple_disabled_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" in manifest.nodes - - expected_file_path = "folder_2" - assert expected_file_path in manifest.nodes["model.test.my_model_2"].original_file_path - assert expected_file_path in manifest.nodes["model.test.my_model_3"].original_file_path - - assert "model.test.my_model_2" in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - expected_disabled_file_path = "folder_1" - assert ( - expected_disabled_file_path - in manifest.disabled["model.test.my_model_2"][0].original_file_path - ) - assert ( - expected_disabled_file_path - in manifest.disabled["model.test.my_model_3"][0].original_file_path - ) - - -# ensure overrides work when enabling in sql file when multiple nodes exist within disabled -class TestMultipleDisabledNodesOverrideModel: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": fixtures.my_model, - "folder_1": { - "my_model_2.sql": fixtures.my_model_2_enabled, - "my_model_3.sql": fixtures.my_model_3, - }, - "folder_2": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3_enabled, - }, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "folder_1": { - "enabled": False, - }, - "folder_2": { - "enabled": False, - }, - }, - } - } - - def test_multiple_disabled_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" in manifest.nodes - - expected_file_path_2 = "folder_1" - assert expected_file_path_2 in manifest.nodes["model.test.my_model_2"].original_file_path - expected_file_path_3 = "folder_2" - assert expected_file_path_3 in manifest.nodes["model.test.my_model_3"].original_file_path - - assert "model.test.my_model_2" in manifest.disabled - assert "model.test.my_model_3" in manifest.disabled - - expected_disabled_file_path_2 = "folder_2" - assert ( - expected_disabled_file_path_2 - in manifest.disabled["model.test.my_model_2"][0].original_file_path - ) - expected_disabled_file_path_3 = "folder_1" - assert ( - expected_disabled_file_path_3 - in manifest.disabled["model.test.my_model_3"][0].original_file_path - ) - - -# ensure everything lands where it should when disabling multiple nodes with the same unique id -class TestManyDisabledNodesSuccess: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": fixtures.my_model, - "folder_1": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - }, - "folder_2": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - }, - "folder_3": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - }, - "folder_4": { - "my_model_2.sql": fixtures.my_model_2, - "my_model_3.sql": fixtures.my_model_3, - }, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "models": { - "test": { - "folder_1": { - "enabled": False, - }, - "folder_2": { - "enabled": True, - }, - "folder_3": { - "enabled": False, - }, - "folder_4": { - "enabled": False, - }, - }, - } - } - - def test_many_disabled_config(self, project): - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - assert "model.test.my_model_2" in manifest.nodes - assert "model.test.my_model_3" in manifest.nodes - - expected_file_path = "folder_2" - assert expected_file_path in manifest.nodes["model.test.my_model_2"].original_file_path - assert expected_file_path in manifest.nodes["model.test.my_model_3"].original_file_path - - assert len(manifest.disabled["model.test.my_model_2"]) == 3 - assert len(manifest.disabled["model.test.my_model_3"]) == 3 - - -class TestInvalidEnabledConfig: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": fixtures.schema_invalid_enabled_yml, - "my_model.sql": fixtures.my_model, - } - - def test_invalis_config(self, project): - with pytest.raises(ValidationError) as exc: - run_dbt(["parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "'True and False' is not of type 'boolean'" - assert expected_msg in exc_str diff --git a/tests/functional/configs/test_dupe_paths.py b/tests/functional/configs/test_dupe_paths.py deleted file mode 100644 index b9a98d21c..000000000 --- a/tests/functional/configs/test_dupe_paths.py +++ /dev/null @@ -1,74 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -my_model_sql = """ -select 1 as fun -""" - -seed_csv = """id,value -4,2 -""" - -somedoc_md = """ -{% docs somedoc %} -Testing, testing -{% enddocs %} -""" - -schema_yml = """ -version: 2 -models: - - name: my_model - description: testing model -""" - - -# Either a docs or a yml file is necessary to see the problem -# when two of the paths in 'all_source_paths' are the same -class TestDupeProjectPaths: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "seed.csv": seed_csv, - "somedoc.md": somedoc_md, - "schema.yml": schema_yml, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "model-paths": ["models"], - "seed-paths": ["models"], - } - - def test_config_with_dupe_paths(self, project, dbt_project_yml): - results = run_dbt(["seed"]) - assert len(results) == 1 - results = run_dbt(["run"]) - assert len(results) == 1 - - -class TestDupeStrippedProjectPaths: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": my_model_sql, - "seed.csv": seed_csv, - "somedoc.md": somedoc_md, - "schema.yml": schema_yml, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "model-paths": ["models/"], - "seed-paths": ["models"], - } - - def test_config_with_dupe_paths(self, project, dbt_project_yml): - results = run_dbt(["seed"]) - assert len(results) == 1 - results = run_dbt(["run"]) - assert len(results) == 1 diff --git a/tests/functional/configs/test_get_default.py b/tests/functional/configs/test_get_default.py deleted file mode 100644 index 36d420e08..000000000 --- a/tests/functional/configs/test_get_default.py +++ /dev/null @@ -1,26 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -models_get__any_model_sql = """ --- models/any_model.sql -select {{ config.get('made_up_nonexistent_key', 'default_value') }} as col_value - -""" - - -class TestConfigGetDefault: - @pytest.fixture(scope="class") - def models(self): - return {"any_model.sql": models_get__any_model_sql} - - def test_config_with_get_default( - self, - project, - ): - # This test runs a model with a config.get(key, default) - # The default value is 'default_value' and causes an error - results = run_dbt(["run"], expect_pass=False) - assert len(results) == 1 - assert str(results[0].status) == "error" - assert 'column "default_value" does not exist' in results[0].message diff --git a/tests/functional/configs/test_grant_configs.py b/tests/functional/configs/test_grant_configs.py deleted file mode 100644 index 23b884a16..000000000 --- a/tests/functional/configs/test_grant_configs.py +++ /dev/null @@ -1,155 +0,0 @@ -from dbt.tests.util import ( - get_manifest, - run_dbt, - write_config_file, - write_file, -) -import pytest - - -dbt_project_yml = """ -models: - test: - my_model: - +grants: - my_select: ["reporter", "bi"] -""" - -append_schema_yml = """ -version: 2 -models: - - name: my_model - config: - grants: - +my_select: ["someone"] -""" - - -my_model_base_sql = """ -select 1 as fun -""" - - -my_model_clobber_sql = """ -{{ config(grants={'my_select': ['other_user']}) }} -select 1 as fun -""" - -my_model_extend_sql = """ -{{ config(grants={'+my_select': ['other_user']}) }} -select 1 as fun -""" - -my_model_extend_string_sql = """ -{{ config(grants={'+my_select': 'other_user'}) }} -select 1 as fun -""" - -my_model_extend_twice_sql = """ -{{ config(grants={'+my_select': ['other_user']}) }} -{{ config(grants={'+my_select': ['alt_user']}) }} -select 1 as fun -""" - - -class TestGrantConfigs: - @pytest.fixture(scope="class") - def models(self): - return {"my_model.sql": my_model_base_sql} - - @pytest.fixture(scope="class") - def project_config_update(self): - return dbt_project_yml - - def test_model_grant_config(self, project, logs_dir): - # This test uses "my_select" instead of "select", so we need - # use "parse" instead of "run" because we will get compilation - # errors for the grants. - run_dbt(["parse"]) - - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - assert model_id in manifest.nodes - - model = manifest.nodes[model_id] - model_config = model.config - assert hasattr(model_config, "grants") - - # no schema grant, no model grant, just project - expected = {"my_select": ["reporter", "bi"]} - assert model_config.grants == expected - - # add model grant with clobber - write_file(my_model_clobber_sql, project.project_root, "models", "my_model.sql") - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["other_user"]} - assert model_config.grants == expected - - # change model to extend grants - write_file(my_model_extend_sql, project.project_root, "models", "my_model.sql") - run_dbt(["parse"]) - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["reporter", "bi", "other_user"]} - assert model_config.grants == expected - - # add schema file with extend - write_file(append_schema_yml, project.project_root, "models", "schema.yml") - run_dbt(["parse"]) - - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["reporter", "bi", "someone", "other_user"]} - assert model_config.grants == expected - - # change model file to have string instead of list - write_file(my_model_extend_string_sql, project.project_root, "models", "my_model.sql") - run_dbt(["parse"]) - - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["reporter", "bi", "someone", "other_user"]} - assert model_config.grants == expected - - # change model file to have string instead of list - write_file(my_model_extend_twice_sql, project.project_root, "models", "my_model.sql") - run_dbt(["parse"]) - - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["reporter", "bi", "someone", "other_user", "alt_user"]} - assert model_config.grants == expected - - # Remove grant from dbt_project - config = { - "config-version": 2, - "name": "test", - "version": "0.1.0", - "profile": "test", - "log-path": logs_dir, - } - write_config_file(config, project.project_root, "dbt_project.yml") - run_dbt(["parse"]) - - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["someone", "other_user", "alt_user"]} - assert model_config.grants == expected - - # Remove my_model config, leaving only schema file - write_file(my_model_base_sql, project.project_root, "models", "my_model.sql") - run_dbt(["parse"]) - - manifest = get_manifest(project.project_root) - model_config = manifest.nodes[model_id].config - - expected = {"my_select": ["someone"]} - assert model_config.grants == expected diff --git a/tests/functional/configs/test_indiv_tests.py b/tests/functional/configs/test_indiv_tests.py deleted file mode 100644 index 1084760a2..000000000 --- a/tests/functional/configs/test_indiv_tests.py +++ /dev/null @@ -1,58 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.configs.fixtures import BaseConfigProject - - -class TestConfigIndivTests(BaseConfigProject): - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "seeds": { - "quote_columns": False, - }, - "vars": { - "test": { - "seed_name": "seed", - } - }, - "data_tests": {"test": {"enabled": True, "severity": "WARN"}}, - } - - def test_configuring_individual_tests( - self, - project, - ): - assert len(run_dbt(["seed"])) == 1 - assert len(run_dbt(["run"])) == 2 - - # all tests on (minus sleeper_agent) + WARN - assert len(run_dbt(["test"])) == 5 - - # turn off two of them directly - assert len(run_dbt(["test", "--vars", '{"enabled_direct": False}'])) == 3 - - # turn on sleeper_agent data test directly - assert ( - len( - run_dbt( - ["test", "--models", "sleeper_agent", "--vars", '{"enabled_direct": True}'] - ) - ) - == 1 - ) - - # set three to ERROR directly - results = run_dbt( - [ - "test", - "--models", - "config.severity:error", - "--vars", - '{"enabled_direct": True, "severity_direct": "ERROR"}', - ], - expect_pass=False, - ) - assert len(results) == 2 - assert results[0].status == "fail" - assert results[1].status == "fail" diff --git a/tests/functional/configs/test_unused_configs.py b/tests/functional/configs/test_unused_configs.py deleted file mode 100644 index a01ebc01c..000000000 --- a/tests/functional/configs/test_unused_configs.py +++ /dev/null @@ -1,52 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -seeds__seed_csv = """id,value -4,2 -""" - - -class TestUnusedModelConfigs: - @pytest.fixture(scope="class") - def seeds(self): - return {"seed.csv": seeds__seed_csv} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "test-paths": ["does-not-exist"], - "models": { - "test": { - "enabled": True, - } - }, - "seeds": { - "quote_columns": False, - }, - "sources": { - "test": { - "enabled": True, - } - }, - "data_tests": { - "test": { - "enabled": True, - } - }, - } - - def test_warn_unused_configuration_paths( - self, - project, - ): - with pytest.raises(CompilationError) as excinfo: - run_dbt(["--warn-error", "seed"]) - - assert "Configuration paths exist" in str(excinfo.value) - assert "- sources.test" in str(excinfo.value) - assert "- models.test" in str(excinfo.value) - assert "- models.test" in str(excinfo.value) - - run_dbt(["seed"]) diff --git a/tests/functional/duplicates/test_duplicate_analysis.py b/tests/functional/duplicates/test_duplicate_analysis.py deleted file mode 100644 index 742320406..000000000 --- a/tests/functional/duplicates/test_duplicate_analysis.py +++ /dev/null @@ -1,32 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -my_model_sql = """ -select 1 as id -""" - -my_analysis_sql = """ -select * from {{ ref('my_model') }} -""" - - -class TestDuplicateAnalysis: - @pytest.fixture(scope="class") - def models(self): - return {"my_model.sql": my_model_sql} - - @pytest.fixture(scope="class") - def analyses(self): - return { - "anlysis-1": {"model.sql": my_analysis_sql}, - "anlysis-2": {"model.sql": my_analysis_sql}, - } - - def test_duplicate_model_enabled(self, project): - message = "dbt found two analyses with the name" - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - assert message in exc_str diff --git a/tests/functional/duplicates/test_duplicate_exposure.py b/tests/functional/duplicates/test_duplicate_exposure.py deleted file mode 100644 index 9ece78f91..000000000 --- a/tests/functional/duplicates/test_duplicate_exposure.py +++ /dev/null @@ -1,30 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -exposure_dupes_schema_yml = """ -version: 2 -exposures: - - name: something - type: dashboard - owner: - email: test@example.com - - name: something - type: dashboard - owner: - email: test@example.com - -""" - - -class TestDuplicateExposure: - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": exposure_dupes_schema_yml} - - def test_duplicate_exposure(self, project): - message = "dbt found two exposures with the name" - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_macro.py b/tests/functional/duplicates/test_duplicate_macro.py deleted file mode 100644 index 5c910e468..000000000 --- a/tests/functional/duplicates/test_duplicate_macro.py +++ /dev/null @@ -1,71 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -bad_same_macros_sql = """ -{% macro some_macro() %} -{% endmacro %} - -{% macro some_macro() %} -{% endmacro %} - -""" - -bad_separate_one_sql = """ -{% macro some_macro() %} -{% endmacro %} - -""" - -bad_separate_two_sql = """ -{% macro some_macro() %} -{% endmacro %} - -""" - -model_sql = """ -select 1 as value -""" - - -class TestDuplicateMacroEnabledSameFile: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": model_sql} - - @pytest.fixture(scope="class") - def macros(self): - return { - "macro.sql": bad_same_macros_sql, - } - - def test_duplicate_macros(self, project): - message = 'dbt found two macros named "some_macro" in the project' - with pytest.raises(CompilationError) as exc: - run_dbt(["parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - assert message in exc_str - assert "macro.sql" in exc_str - - -class TestDuplicateMacroEnabledDifferentFiles: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": model_sql} - - @pytest.fixture(scope="class") - def macros(self): - return { - "one.sql": bad_separate_one_sql, - "two.sql": bad_separate_two_sql, - } - - def test_duplicate_macros(self, project): - message = 'dbt found two macros named "some_macro" in the project' - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - assert message in exc_str - assert "one.sql" in exc_str - assert "two.sql" in exc_str diff --git a/tests/functional/duplicates/test_duplicate_metric.py b/tests/functional/duplicates/test_duplicate_metric.py deleted file mode 100644 index 676edc6b5..000000000 --- a/tests/functional/duplicates/test_duplicate_metric.py +++ /dev/null @@ -1,40 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -metric_dupes_schema_yml = """ -version: 2 - -metrics: - - - name: number_of_people - label: "Number of people" - description: Total count of people - type: simple - type_params: - measure: "people" - meta: - my_meta: 'testing' - - - name: number_of_people - label: "Collective tenure" - description: Total number of years of team experience - type: simple - type_params: - measure: - name: "years_tenure" - filter: "{{ Dimension('people_entity__loves_dbt') }} is true" -""" - - -class TestDuplicateMetric: - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": metric_dupes_schema_yml} - - def test_duplicate_metric(self, project): - message = "dbt found two metrics with the name" - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_model.py b/tests/functional/duplicates/test_duplicate_model.py deleted file mode 100644 index 650d54d29..000000000 --- a/tests/functional/duplicates/test_duplicate_model.py +++ /dev/null @@ -1,263 +0,0 @@ -from dbt.exceptions import AmbiguousAliasError -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import get_manifest, run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -disabled_model_sql = """ -{{ - config( - enabled=False, - materialized="table", - ) -}} - -select 1 - -""" - -enabled_model_sql = """ -{{ - config( - enabled=True, - materialized="table", - ) -}} - -select 1 as value - -""" - -dbt_project_yml = """ -name: 'local_dep' -version: '1.0' -config-version: 2 - -profile: 'default' - -model-paths: ["models"] - -seeds: - quote_columns: False - -""" - -local_dep_schema_yml = """ -models: - - name: table_model - config: - alias: table_model_local_dep - columns: - - name: id - data_tests: - - unique -""" - -local_dep_versions_schema_yml = """ -models: - - name: table_model - config: - alias: table_model_local_dep - versions: - - v: 1 -""" - - -class TestDuplicateModelEnabled: - @pytest.fixture(scope="class") - def models(self): - return { - "model-enabled-1": {"model.sql": enabled_model_sql}, - "model-enabled-2": {"model.sql": enabled_model_sql}, - } - - def test_duplicate_model_enabled(self, project): - message = "dbt found two models with the name" - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - assert message in exc_str - - -class TestDuplicateModelDisabled: - @pytest.fixture(scope="class") - def models(self): - return { - "model-disabled": {"model.sql": disabled_model_sql}, - "model-enabled": {"model.sql": enabled_model_sql}, - } - - def test_duplicate_model_disabled(self, project): - results = run_dbt(["compile"]) - assert len(results) == 1 - - manifest = get_manifest(project.project_root) - - model_id = "model.test.model" - assert model_id in manifest.nodes - assert model_id in manifest.disabled - - def test_duplicate_model_disabled_partial_parsing(self, project): - run_dbt(["clean"]) - results = run_dbt(["--partial-parse", "compile"]) - assert len(results) == 1 - results = run_dbt(["--partial-parse", "compile"]) - assert len(results) == 1 - results = run_dbt(["--partial-parse", "compile"]) - assert len(results) == 1 - - -class TestDuplicateModelAliasEnabledAcrossPackages: - @pytest.fixture(scope="class") - def models(self): - return {"table_model.sql": enabled_model_sql} - - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_dependency_files = { - "dbt_project.yml": dbt_project_yml, - "models": {"table_model.sql": enabled_model_sql}, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - def test_duplicate_model_alias_enabled_across_packages(self, project): - run_dbt(["deps"]) - message = "dbt found two resources with the database representation" - with pytest.raises(AmbiguousAliasError) as exc: - run_dbt(["run"]) - assert message in str(exc.value) - - -class TestDuplicateModelDisabledAcrossPackages: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_dependency_files = { - "dbt_project.yml": dbt_project_yml, - "models": {"table_model.sql": enabled_model_sql}, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - @pytest.fixture(scope="class") - def models(self): - return {"table_model.sql": disabled_model_sql} - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - def test_duplicate_model_disabled_across_packages(self, project): - run_dbt(["deps"]) - results = run_dbt(["compile"]) - assert len(results) == 1 - - manifest = get_manifest(project.project_root) - local_dep_model_id = "model.local_dep.table_model" - model_id = "model.test.table_model" - assert local_dep_model_id in manifest.nodes - assert model_id in manifest.disabled - - -class TestDuplicateModelNameWithTestAcrossPackages: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_dependency_files = { - "dbt_project.yml": dbt_project_yml, - "models": {"table_model.sql": enabled_model_sql, "schema.yml": local_dep_schema_yml}, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - @pytest.fixture(scope="class") - def models(self): - return {"table_model.sql": enabled_model_sql} - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - def test_duplicate_model_name_with_test_across_packages(self, project): - run_dbt(["deps"]) - manifest = run_dbt(["parse"]) - assert len(manifest.nodes) == 3 - - # model nodes with duplicate names exist - local_dep_model_node_id = "model.local_dep.table_model" - root_model_node_id = "model.test.table_model" - assert local_dep_model_node_id in manifest.nodes - assert root_model_node_id in manifest.nodes - - # test node exists and is attached to correct node - test_node_id = "test.local_dep.unique_table_model_id.1da9e464d9" - assert test_node_id in manifest.nodes - assert manifest.nodes[test_node_id].attached_node == local_dep_model_node_id - - -class TestDuplicateModelNameWithVersionAcrossPackages: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_dependency_files = { - "dbt_project.yml": dbt_project_yml, - "models": { - "table_model.sql": enabled_model_sql, - "schema.yml": local_dep_versions_schema_yml, - }, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - @pytest.fixture(scope="class") - def models(self): - return {"table_model.sql": enabled_model_sql} - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - def test_duplicate_model_name_with_test_across_packages(self, project): - run_dbt(["deps"]) - manifest = run_dbt(["parse"]) - assert len(manifest.nodes) == 2 - - # model nodes with duplicate names exist - local_dep_model_node_id = "model.local_dep.table_model.v1" - root_model_node_id = "model.test.table_model" - assert local_dep_model_node_id in manifest.nodes - assert root_model_node_id in manifest.nodes - - -class TestModelTestOverlap: - @pytest.fixture(scope="class") - def models(self): - return {"table_model.sql": enabled_model_sql} - - @property - def project_config(self): - return { - "config-version": 2, - "test-paths": ["models"], - } - - def test_duplicate_test_model_paths(self, project): - # this should be ok: test/model overlap is fine - run_dbt(["compile"]) - run_dbt(["--partial-parse", "compile"]) - run_dbt(["--partial-parse", "compile"]) - - -class TestMultipleDisabledModels: - @pytest.fixture(scope="class") - def models(self): - return { - "subdir3": {"model_alt.sql": disabled_model_sql}, - "subdir2": {"model_alt.sql": disabled_model_sql}, - "subdir1": {"model_alt.sql": enabled_model_sql}, - } - - def test_multiple_disabled_models(self, project): - run_dbt(["compile"]) - manifest = get_manifest(project.project_root) - model_id = "model.test.model_alt" - assert model_id in manifest.nodes diff --git a/tests/functional/duplicates/test_duplicate_resource.py b/tests/functional/duplicates/test_duplicate_resource.py deleted file mode 100644 index 36ab4d919..000000000 --- a/tests/functional/duplicates/test_duplicate_resource.py +++ /dev/null @@ -1,33 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -models_naming_dupes_schema_yml = """ -version: 2 -models: - - name: something - description: This table has basic information about orders, as well as some derived facts based on payments -exposure: - - name: something - -""" - -something_model_sql = """ - -select 1 as item - -""" - - -class TestDuplicateSchemaResource: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_naming_dupes_schema_yml, - "something.sql": something_model_sql, - } - - # a model and an exposure can share the same name - def test_duplicate_model_and_exposure(self, project): - result = run_dbt(["compile"]) - assert len(result) == 1 diff --git a/tests/functional/duplicates/test_duplicate_source.py b/tests/functional/duplicates/test_duplicate_source.py deleted file mode 100644 index adb9dee4c..000000000 --- a/tests/functional/duplicates/test_duplicate_source.py +++ /dev/null @@ -1,26 +0,0 @@ -from dbt.tests.util import run_dbt -from dbt_common.exceptions import CompilationError -import pytest - - -source_dupes_schema_yml = """ -version: 2 -sources: - - name: something - tables: - - name: dupe - - name: dupe - -""" - - -class TestDuplicateSourceEnabled: - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": source_dupes_schema_yml} - - def test_duplicate_source_enabled(self, project): - message = "dbt found two sources with the name" - with pytest.raises(CompilationError) as exc: - run_dbt(["compile"]) - assert message in str(exc.value) diff --git a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py deleted file mode 100644 index 3a28bc778..000000000 --- a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py +++ /dev/null @@ -1,51 +0,0 @@ -from dbt.exceptions import ParsingError -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - models__ref_snapshot_sql, - models__schema_yml, -) - - -snapshots_invalid__snapshot_sql = """ -{# make sure to never name this anything with `target_schema` in the name, or the test will be invalid! #} -{% snapshot missing_field_target_underscore_schema %} - {# missing the mandatory target_schema parameter #} - {{ - config( - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed - -{% endsnapshot %} -""" - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_invalid__snapshot_sql} - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - -@pytest.fixture(scope="class") -def macros(): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - -def test_missing_strategy(project): - with pytest.raises(ParsingError) as exc: - run_dbt(["compile"], expect_pass=False) - - assert "Snapshots must be configured with a 'strategy'" in str(exc.value) From 7c19b3335a996fecbc4b42a22b86a02e2eca6f73 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 18 Jun 2024 17:03:49 -0400 Subject: [PATCH 078/114] [Regression] Fix psycopg2 version install (#113) --- .../unreleased/Fixes-20240605-202614.yaml | 7 + .github/scripts/psycopg2-check.sh | 20 + .github/workflows/integration-tests.yml | 47 +- .gitignore | 6 + README.md | 14 + pyproject.toml | 5 +- tests/functional/test_selection/conftest.py | 96 --- .../test_selection_expansion.py | 567 ------------------ 8 files changed, 80 insertions(+), 682 deletions(-) create mode 100644 .changes/unreleased/Fixes-20240605-202614.yaml create mode 100755 .github/scripts/psycopg2-check.sh delete mode 100644 tests/functional/test_selection/conftest.py delete mode 100644 tests/functional/test_selection/test_selection_expansion.py diff --git a/.changes/unreleased/Fixes-20240605-202614.yaml b/.changes/unreleased/Fixes-20240605-202614.yaml new file mode 100644 index 000000000..b7ab8eb06 --- /dev/null +++ b/.changes/unreleased/Fixes-20240605-202614.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Default to psycopg2-binary and allow overriding to psycopg2 via DBT_PSYCOPG2_NAME + (restores previous behavior) +time: 2024-06-05T20:26:14.801254-04:00 +custom: + Author: mikealfare + Issue: "96" diff --git a/.github/scripts/psycopg2-check.sh b/.github/scripts/psycopg2-check.sh new file mode 100755 index 000000000..faee902c1 --- /dev/null +++ b/.github/scripts/psycopg2-check.sh @@ -0,0 +1,20 @@ +python -m venv venv +source venv/bin/activate +python -m pip install . + +if [[ "$PSYCOPG2_WORKAROUND" == true ]]; then + if [[ $(pip show psycopg2-binary) ]]; then + PSYCOPG2_VERSION=$(pip show psycopg2-binary | grep Version | cut -d " " -f 2) + pip uninstall -y psycopg2-binary + pip install psycopg2==$PSYCOPG2_VERSION + fi +fi + +PSYCOPG2_NAME=$((pip show psycopg2 || pip show psycopg2-binary) | grep Name | cut -d " " -f 2) +if [[ "$PSYCOPG2_NAME" != "$PSYCOPG2_EXPECTED_NAME" ]]; then + echo -e 'Expected: "$PSYCOPG2_EXPECTED_NAME" but found: "$PSYCOPG2_NAME"' + exit 1 +fi + +deactivate +rm -r ./venv diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 1aefb7f5e..d73671908 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -45,7 +45,7 @@ defaults: jobs: integration: name: Integration Tests - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: fail-fast: false @@ -102,24 +102,41 @@ jobs: psycopg2-check: name: "Test psycopg2 build version" - runs-on: ${{ matrix.scenario.platform }} + runs-on: ${{ matrix.platform }} strategy: fail-fast: false matrix: - scenario: - - {platform: ubuntu-latest, psycopg2-name: psycopg2} - - {platform: macos-12, psycopg2-name: psycopg2-binary} + platform: [ubuntu-22.04, macos-12] + python-version: ["3.8", "3.11"] steps: - name: "Check out repository" uses: actions/checkout@v4 - - name: "Test psycopg2 name" - run: | - python -m pip install . - PSYCOPG2_PIP_ENTRY=$(pip list | grep "psycopg2 " || pip list | grep psycopg2-binary) - echo $PSYCOPG2_PIP_ENTRY - PSYCOPG2_NAME="${PSYCOPG2_PIP_ENTRY%% *}" - echo $PSYCOPG2_NAME - if [[ "${PSYCOPG2_NAME}" != "${{ matrix.scenario.psycopg2-name }}" ]]; then - exit 1 - fi + - name: "Set up Python ${{ matrix.python-version }}" + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: "Test psycopg2 name - default" + run: .github/scripts/psycopg2-check.sh + env: + PSYCOPG2_EXPECTED_NAME: psycopg2-binary + + - name: "Test psycopg2 name - invalid override" + run: .github/scripts/psycopg2-check.sh + env: + DBT_PSYCOPG2_NAME: rubber-baby-buggy-bumpers + PSYCOPG2_EXPECTED_NAME: psycopg2-binary + + - name: "Test psycopg2 name - override" + run: .github/scripts/psycopg2-check.sh + env: + DBT_PSYCOPG2_NAME: psycopg2 + PSYCOPG2_EXPECTED_NAME: psycopg2-binary # we have not implemented the hook yet, so this doesn't work + + - name: "Test psycopg2 name - manual override" + # verify that the workaround documented in the `README.md` continues to work + run: .github/scripts/psycopg2-check.sh + env: + PSYCOPG2_WORKAROUND: true + PSYCOPG2_EXPECTED_NAME: psycopg2 diff --git a/.gitignore b/.gitignore index 094ee4a93..b8d4acccf 100644 --- a/.gitignore +++ b/.gitignore @@ -161,3 +161,9 @@ cython_debug/ # testing artifacts /logs + +# MacOS +.DS_Store + +# vscode +.vscode/ diff --git a/README.md b/README.md index d5b8900ae..285f5144d 100644 --- a/README.md +++ b/README.md @@ -21,6 +21,20 @@ more information on using dbt with Postgres, consult [the docs](https://docs.get - [Install dbt](https://docs.getdbt.com/docs/installation) - Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/) +### `psycopg2-binary` vs. `psycopg2` + +By default, `dbt-postgres` installs `psycopg2-binary`. This is great for development, and even testing, as it does not require any OS dependencies; it's a pre-built wheel. However, building `psycopg2` from source will grant performance improvements that are desired in a production environment. In order to install `psycopg2`, use the following steps: + +```bash +if [[ $(pip show psycopg2-binary) ]]; then + PSYCOPG2_VERSION=$(pip show psycopg2-binary | grep Version | cut -d " " -f 2) + pip uninstall -y psycopg2-binary + pip install psycopg2==$PSYCOPG2_VERSION +fi +``` + +This ensures the version of `psycopg2` will match that of `psycopg2-binary`. + ## Join the dbt Community - Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/) diff --git a/pyproject.toml b/pyproject.toml index a99829d99..92fbf82a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,10 +23,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", ] dependencies = [ - # install `psycopg2` on linux (assumed production) - 'psycopg2>=2.9,<3.0; platform_system == "Linux"', - # install `psycopg2-binary` on macos/windows (assumed development) - 'psycopg2-binary>=2.9,<3.0; platform_system != "Linux"', + "psycopg2-binary>=2.9,<3.0", "dbt-adapters>=0.1.0a1,<2.0", # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency "dbt-core>=1.8.0a1", diff --git a/tests/functional/test_selection/conftest.py b/tests/functional/test_selection/conftest.py deleted file mode 100644 index 2faa9e34b..000000000 --- a/tests/functional/test_selection/conftest.py +++ /dev/null @@ -1,96 +0,0 @@ -from dbt.tests.fixtures.project import write_project_files -import pytest - - -tests__cf_a_b_sql = """ -select * from {{ ref('model_a') }} -cross join {{ ref('model_b') }} -where false -""" - -tests__cf_a_src_sql = """ -select * from {{ ref('model_a') }} -cross join {{ source('my_src', 'my_tbl') }} -where false -""" - -tests__just_a_sql = """ -{{ config(tags = ['data_test_tag']) }} - -select * from {{ ref('model_a') }} -where false -""" - -models__schema_yml = """ -version: 2 - -sources: - - name: my_src - schema: "{{ target.schema }}" - tables: - - name: my_tbl - identifier: model_b - columns: - - name: fun - data_tests: - - unique - -models: - - name: model_a - columns: - - name: fun - tags: [column_level_tag] - data_tests: - - unique - - relationships: - to: ref('model_b') - field: fun - tags: [test_level_tag] - - relationships: - to: source('my_src', 'my_tbl') - field: fun -""" - -models__model_b_sql = """ -{{ config( - tags = ['a_or_b'] -) }} - -select 1 as fun -""" - -models__model_a_sql = """ -{{ config( - tags = ['a_or_b'] -) }} - -select * FROM {{ref('model_b')}} -""" - - -@pytest.fixture(scope="class") -def tests(): - return { - "cf_a_b.sql": tests__cf_a_b_sql, - "cf_a_src.sql": tests__cf_a_src_sql, - "just_a.sql": tests__just_a_sql, - } - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "model_b.sql": models__model_b_sql, - "model_a.sql": models__model_a_sql, - } - - -@pytest.fixture(scope="class") -def project_files( - project_root, - tests, - models, -): - write_project_files(project_root, "tests", tests) - write_project_files(project_root, "models", models) diff --git a/tests/functional/test_selection/test_selection_expansion.py b/tests/functional/test_selection/test_selection_expansion.py deleted file mode 100644 index d17f27d7f..000000000 --- a/tests/functional/test_selection/test_selection_expansion.py +++ /dev/null @@ -1,567 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -class TestSelectionExpansion: - @pytest.fixture(scope="class") - def project_config_update(self): - return {"config-version": 2, "test-paths": ["tests"]} - - def list_tests_and_assert( - self, - include, - exclude, - expected_tests, - indirect_selection="eager", - selector_name=None, - ): - list_args = ["ls", "--resource-type", "test"] - if include: - list_args.extend(("--select", include)) - if exclude: - list_args.extend(("--exclude", exclude)) - if indirect_selection: - list_args.extend(("--indirect-selection", indirect_selection)) - if selector_name: - list_args.extend(("--selector", selector_name)) - - listed = run_dbt(list_args) - assert len(listed) == len(expected_tests) - - test_names = [name.split(".")[-1] for name in listed] - assert sorted(test_names) == sorted(expected_tests) - - def run_tests_and_assert( - self, - include, - exclude, - expected_tests, - indirect_selection="eager", - selector_name=None, - ): - results = run_dbt(["run"]) - assert len(results) == 2 - - test_args = ["test"] - if include: - test_args.extend(("--models", include)) - if exclude: - test_args.extend(("--exclude", exclude)) - if indirect_selection: - test_args.extend(("--indirect-selection", indirect_selection)) - if selector_name: - test_args.extend(("--selector", selector_name)) - - results = run_dbt(test_args) - tests_run = [r.node.name for r in results] - assert len(tests_run) == len(expected_tests) - - assert sorted(tests_run) == sorted(expected_tests) - - def test_all_tests_no_specifiers( - self, - project, - ): - select = None - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "source_unique_my_src_my_tbl_fun", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_alone( - self, - project, - ): - select = "model_a" - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_model_b( - self, - project, - ): - select = "model_a model_b" - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "unique_model_a_fun", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_sources( - self, - project, - ): - select = "model_a source:*" - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "unique_model_a_fun", - "source_unique_my_src_my_tbl_fun", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_exclude_model_b( - self, - project, - ): - select = None - exclude = "model_b" - expected = [ - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "source_unique_my_src_my_tbl_fun", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_exclude_specific_test( - self, - project, - ): - select = "model_a" - exclude = "unique_model_a_fun" - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_exclude_specific_test_cautious( - self, - project, - ): - select = "model_a" - exclude = "unique_model_a_fun" - expected = ["just_a"] - indirect_selection = "cautious" - - self.list_tests_and_assert(select, exclude, expected, indirect_selection) - self.run_tests_and_assert(select, exclude, expected, indirect_selection) - - def test_model_a_exclude_specific_test_buildable( - self, - project, - ): - select = "model_a" - exclude = "unique_model_a_fun" - expected = [ - "just_a", - "cf_a_b", - "cf_a_src", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - ] - indirect_selection = "buildable" - - self.list_tests_and_assert(select, exclude, expected, indirect_selection) - self.run_tests_and_assert(select, exclude, expected, indirect_selection) - - def test_only_generic( - self, - project, - ): - select = "test_type:generic" - exclude = None - expected = [ - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "source_unique_my_src_my_tbl_fun", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_only_singular_unset( - self, - project, - ): - select = "model_a,test_type:singular" - exclude = None - expected = ["cf_a_b", "cf_a_src", "just_a"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_only_singular_eager( - self, - project, - ): - select = "model_a,test_type:singular" - exclude = None - expected = ["cf_a_b", "cf_a_src", "just_a"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_only_singular_cautious( - self, - project, - ): - select = "model_a,test_type:singular" - exclude = None - expected = ["just_a"] - indirect_selection = "cautious" - - self.list_tests_and_assert( - select, exclude, expected, indirect_selection=indirect_selection - ) - self.run_tests_and_assert(select, exclude, expected, indirect_selection=indirect_selection) - - def test_only_singular( - self, - project, - ): - select = "test_type:singular" - exclude = None - expected = ["cf_a_b", "cf_a_src", "just_a"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_only_singular( - self, - project, - ): - select = "model_a,test_type:singular" - exclude = None - expected = ["cf_a_b", "cf_a_src", "just_a"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_test_name_intersection( - self, - project, - ): - select = "model_a,test_name:unique" - exclude = None - expected = ["unique_model_a_fun"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_tag_test_name_intersection( - self, - project, - ): - select = "tag:a_or_b,test_name:relationships" - exclude = None - expected = [ - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_select_column_level_tag( - self, - project, - ): - select = "tag:column_level_tag" - exclude = None - expected = [ - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_exclude_column_level_tag( - self, - project, - ): - select = None - exclude = "tag:column_level_tag" - expected = ["cf_a_b", "cf_a_src", "just_a", "source_unique_my_src_my_tbl_fun"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_test_level_tag( - self, - project, - ): - select = "tag:test_level_tag" - exclude = None - expected = ["relationships_model_a_fun__fun__ref_model_b_"] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_exclude_data_test_tag( - self, - project, - ): - select = "model_a" - exclude = "tag:data_test_tag" - expected = [ - "cf_a_b", - "cf_a_src", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_indirect_selection( - self, - project, - ): - select = "model_a" - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert(select, exclude, expected) - self.run_tests_and_assert(select, exclude, expected) - - def test_model_a_indirect_selection_eager( - self, - project, - ): - select = "model_a" - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - indirect_selection = "eager" - - self.list_tests_and_assert(select, exclude, expected, indirect_selection) - self.run_tests_and_assert(select, exclude, expected, indirect_selection) - - def test_model_a_indirect_selection_cautious( - self, - project, - ): - select = "model_a" - exclude = None - expected = [ - "just_a", - "unique_model_a_fun", - ] - indirect_selection = "cautious" - - self.list_tests_and_assert(select, exclude, expected, indirect_selection) - self.run_tests_and_assert(select, exclude, expected, indirect_selection) - - def test_model_a_indirect_selection_buildable( - self, - project, - ): - select = "model_a" - exclude = None - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - indirect_selection = "buildable" - - self.list_tests_and_assert(select, exclude, expected, indirect_selection) - self.run_tests_and_assert(select, exclude, expected, indirect_selection) - - def test_model_a_indirect_selection_exclude_unique_tests( - self, - project, - ): - select = "model_a" - exclude = "test_name:unique" - indirect_selection = "eager" - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - ] - - self.list_tests_and_assert(select, exclude, expected, indirect_selection) - self.run_tests_and_assert(select, exclude, expected, indirect_selection=indirect_selection) - - def test_model_a_indirect_selection_empty(self, project): - results = run_dbt(["ls", "--indirect-selection", "empty", "--select", "model_a"]) - assert len(results) == 1 - - -class TestExpansionWithSelectors(TestSelectionExpansion): - @pytest.fixture(scope="class") - def selectors(self): - return """ - selectors: - - name: model_a_unset_indirect_selection - definition: - method: fqn - value: model_a - - name: model_a_cautious_indirect_selection - definition: - method: fqn - value: model_a - indirect_selection: "cautious" - - name: model_a_eager_indirect_selection - definition: - method: fqn - value: model_a - indirect_selection: "eager" - - name: model_a_buildable_indirect_selection - definition: - method: fqn - value: model_a - indirect_selection: "buildable" - """ - - def test_selector_model_a_unset_indirect_selection( - self, - project, - ): - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_unset_indirect_selection", - ) - self.run_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_unset_indirect_selection", - ) - - def test_selector_model_a_cautious_indirect_selection( - self, - project, - ): - expected = ["just_a", "unique_model_a_fun"] - - self.list_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_cautious_indirect_selection", - ) - self.run_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_cautious_indirect_selection", - ) - - def test_selector_model_a_eager_indirect_selection( - self, - project, - ): - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_eager_indirect_selection", - ) - self.run_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_eager_indirect_selection", - ) - - def test_selector_model_a_buildable_indirect_selection( - self, - project, - ): - expected = [ - "cf_a_b", - "cf_a_src", - "just_a", - "relationships_model_a_fun__fun__ref_model_b_", - "relationships_model_a_fun__fun__source_my_src_my_tbl_", - "unique_model_a_fun", - ] - - self.list_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_buildable_indirect_selection", - ) - self.run_tests_and_assert( - include=None, - exclude=None, - expected_tests=expected, - selector_name="model_a_buildable_indirect_selection", - ) From 07a585b6ff059be4484f97a1cda18c16a68f0a36 Mon Sep 17 00:00:00 2001 From: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> Date: Thu, 20 Jun 2024 17:44:09 -0500 Subject: [PATCH 079/114] update user docs-issue workflow (#115) --- .github/workflows/docs-issue.yml | 41 ++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .github/workflows/docs-issue.yml diff --git a/.github/workflows/docs-issue.yml b/.github/workflows/docs-issue.yml new file mode 100644 index 000000000..f49cf517c --- /dev/null +++ b/.github/workflows/docs-issue.yml @@ -0,0 +1,41 @@ +# **what?** +# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed + +# **why?** +# To reduce barriers for keeping docs up to date + +# **when?** +# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed. + + +name: Open issues in docs.getdbt.com repo when an issue is labeled +run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}" + +on: + issues: + types: [labeled, closed] + +defaults: + run: + shell: bash + +permissions: + issues: write # comments on issues + +jobs: + open_issues: + # we only want to run this when the issue is closed as completed and the label `user docs` has been assigned. + # If this logic does not exist in this workflow, it runs the + # risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having + # generating the comment before the other runs. This lives here instead of the shared workflow because this is where we + # decide if it should run or not. + if: | + (github.event.issue.state == 'closed' && github.event.issue.state_reason == 'completed') && ( + (github.event.action == 'closed' && contains(github.event.issue.labels.*.name, 'user docs')) || + (github.event.action == 'labeled' && github.event.label.name == 'user docs')) + uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main + with: + issue_repository: "dbt-labs/docs.getdbt.com" + issue_title: "Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}" + issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated." + secrets: inherit From f21b89c2549d4cb367d49c9c077329946ba1fded Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Thu, 20 Jun 2024 15:55:45 -0700 Subject: [PATCH 080/114] Add skip option and remove version_number (#116) --- .github/workflows/release-internal.yml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release-internal.yml b/.github/workflows/release-internal.yml index 01f1a9e3d..c272b8093 100644 --- a/.github/workflows/release-internal.yml +++ b/.github/workflows/release-internal.yml @@ -15,10 +15,6 @@ name: "Release internal patch" on: workflow_dispatch: inputs: - version_number: - description: "The release version number (i.e. 1.0.0b1)" - type: string - required: true ref: description: "The ref (sha or branch name) to use" type: string @@ -29,6 +25,11 @@ on: type: string default: "python -c \"import dbt.adapters.postgres\"" required: true + skip_tests: + description: "Should the tests be skipped? (default to false)" + type: boolean + required: true + default: false defaults: run: @@ -41,9 +42,9 @@ jobs: uses: "dbt-labs/dbt-release/.github/workflows/internal-archive-release.yml@main" with: - version_number: "${{ inputs.version_number }}" package_test_command: "${{ inputs.package_test_command }}" dbms_name: "postgres" ref: "${{ inputs.ref }}" + skip_tests: "${{ inputs.skip_tests }}" secrets: "inherit" From e7c4f790918609810dd58815b407f026176bb90f Mon Sep 17 00:00:00 2001 From: Marta Paes <marta.paes.moreira@gmail.com> Date: Mon, 8 Jul 2024 20:05:11 +0200 Subject: [PATCH 081/114] Support persisting docs for `materialized_view` materializations (#98) Co-authored-by: Anders <anders.swanson@dbtlabs.com> Co-authored-by: Colin <colin.rogers@dbtlabs.com> --- .../unreleased/Fixes-20240626-163930.yaml | 6 +++ dbt/include/postgres/macros/adapters.sql | 7 ++- .../shared_tests/test_persist_docs.py | 48 +++++++++++++++++++ 3 files changed, 60 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Fixes-20240626-163930.yaml diff --git a/.changes/unreleased/Fixes-20240626-163930.yaml b/.changes/unreleased/Fixes-20240626-163930.yaml new file mode 100644 index 000000000..37fcc56f9 --- /dev/null +++ b/.changes/unreleased/Fixes-20240626-163930.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix `persist_docs` for `materialized_view` materializations. Previously, using this configuration with materialized view models would lead to an error. +time: 2024-06-26T16:39:30.455995+02:00 +custom: + Author: morsapaes + Issue: "120" diff --git a/dbt/include/postgres/macros/adapters.sql b/dbt/include/postgres/macros/adapters.sql index 294443be2..1d20e6b3f 100644 --- a/dbt/include/postgres/macros/adapters.sql +++ b/dbt/include/postgres/macros/adapters.sql @@ -196,7 +196,12 @@ {% macro postgres__alter_relation_comment(relation, comment) %} {% set escaped_comment = postgres_escape_comment(comment) %} - comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }}; + {% if relation.type == 'materialized_view' -%} + {% set relation_type = "materialized view" %} + {%- else -%} + {%- set relation_type = relation.type -%} + {%- endif -%} + comment on {{ relation_type }} {{ relation }} is {{ escaped_comment }}; {% endmacro %} diff --git a/tests/functional/shared_tests/test_persist_docs.py b/tests/functional/shared_tests/test_persist_docs.py index 2653ca4ae..f1ad342b6 100644 --- a/tests/functional/shared_tests/test_persist_docs.py +++ b/tests/functional/shared_tests/test_persist_docs.py @@ -1,8 +1,29 @@ +import pytest +import json + +from dbt.tests.adapter.materialized_view import files from dbt.tests.adapter.persist_docs.test_persist_docs import ( BasePersistDocs, BasePersistDocsColumnMissing, BasePersistDocsCommentOnQuotedColumn, ) +from tests.functional.utils import run_dbt + +_MATERIALIZED_VIEW_PROPERTIES__SCHEMA_YML = """ +version: 2 + +models: + - name: my_materialized_view + description: | + Materialized view model description "with double quotes" + and with 'single quotes' as welll as other; + '''abc123''' + reserved -- characters + 80% of statistics are made up on the spot + -- + /* comment */ + Some $lbl$ labeled $lbl$ and $$ unlabeled $$ dollar-quoting +""" class TestPersistDocs(BasePersistDocs): @@ -15,3 +36,30 @@ class TestPersistDocsColumnMissing(BasePersistDocsColumnMissing): class TestPersistDocsCommentOnQuotedColumn(BasePersistDocsCommentOnQuotedColumn): pass + + +class TestPersistDocsWithMaterializedView(BasePersistDocs): + @pytest.fixture(scope="class", autouse=True) + def seeds(self): + return {"my_seed.csv": files.MY_SEED} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_materialized_view.sql": files.MY_MATERIALIZED_VIEW, + } + + @pytest.fixture(scope="class") + def properties(self): + return { + "schema.yml": _MATERIALIZED_VIEW_PROPERTIES__SCHEMA_YML, + } + + def test_has_comments_pglike(self, project): + run_dbt(["docs", "generate"]) + with open("target/catalog.json") as fp: + catalog_data = json.load(fp) + assert "nodes" in catalog_data + assert len(catalog_data["nodes"]) == 2 + view_node = catalog_data["nodes"]["model.test.my_materialized_view"] + assert view_node["metadata"]["comment"].startswith("Materialized view model description") From 00df40d3923732afd251bf3a8cbb3581b0a7e125 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Fri, 12 Jul 2024 16:07:53 -0700 Subject: [PATCH 082/114] Base 207/add test (#119) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> Co-authored-by: Mike Alfare <mike.alfare@dbtlabs.com> --- tests/functional/shared_tests/test_show.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/functional/shared_tests/test_show.py b/tests/functional/shared_tests/test_show.py index 47974a04c..7f3da925e 100644 --- a/tests/functional/shared_tests/test_show.py +++ b/tests/functional/shared_tests/test_show.py @@ -1,6 +1,7 @@ from dbt.tests.adapter.dbt_show.test_dbt_show import ( BaseShowLimit, BaseShowSqlHeader, + BaseShowDoesNotHandleDoubleLimit, ) @@ -10,3 +11,7 @@ class TestPostgresShowSqlHeader(BaseShowSqlHeader): class TestPostgresShowLimit(BaseShowLimit): pass + + +class TestPostgresShowDoesNotHandleDoubleLimit(BaseShowDoesNotHandleDoubleLimit): + pass From 7a4c0219fee0da0b6dcc086929e3985601f8d82a Mon Sep 17 00:00:00 2001 From: Gerda Shank <gerda.shank@gmail.com> Date: Mon, 15 Jul 2024 22:29:35 -0400 Subject: [PATCH 083/114] Remove snapshot tests (#127) --- .../{shared_tests => adapter}/__init__.py | 0 .../{shared_tests => adapter}/test_aliases.py | 0 .../{shared_tests => adapter}/test_basic.py | 0 .../{shared_tests => adapter}/test_caching.py | 0 .../{shared_tests => adapter}/test_clone.py | 0 .../test_column_types.py | 0 .../test_concurrency.py | 0 .../test_constraints.py | 0 .../test_data_types.py | 0 .../{shared_tests => adapter}/test_debug.py | 0 .../{shared_tests => adapter}/test_empty.py | 0 .../test_ephemeral.py | 0 .../{shared_tests => adapter}/test_grants.py | 0 .../test_hooks/data/seed_model.sql | 0 .../test_hooks/data/seed_run.sql | 0 .../test_hooks/test_hooks.py | 0 .../test_incremental.py | 0 .../test_persist_docs.py | 0 .../test_query_comment.py | 0 .../test_relations.py | 0 .../{shared_tests => adapter}/test_show.py | 0 .../test_simple_copy.py | 0 .../test_simple_seed/seed_bom.csv | 0 .../test_simple_seed/test_simple_seed.py | 0 .../test_simple_snapshot.py | 0 .../test_store_test_failures.py | 0 .../test_unit_testing.py | 0 .../{shared_tests => adapter}/test_utils.py | 0 .../data/invalidate_postgres.sql | 27 -- .../simple_snapshot/data/seed_pg.sql | 223 ---------- .../simple_snapshot/data/shared_macros.sql | 80 ---- .../simple_snapshot/data/update.sql | 261 ------------ tests/functional/simple_snapshot/fixtures.py | 389 ------------------ .../simple_snapshot/test_basic_snapshot.py | 373 ----------------- .../test_changing_check_cols_snapshot.py | 127 ------ .../test_changing_strategy_snapshot.py | 128 ------ .../test_check_cols_snapshot.py | 113 ----- .../test_check_cols_updated_at_snapshot.py | 114 ----- .../test_comment_ending_snapshot.py | 36 -- .../test_cross_schema_snapshot.py | 48 --- .../test_hard_delete_snapshot.py | 192 --------- .../test_invalid_namespace_snapshot.py | 67 --- .../test_long_text_snapshot.py | 70 ---- .../test_renamed_source_snapshot.py | 74 ---- .../test_select_exclude_snapshot.py | 161 -------- .../test_slow_query_snapshot.py | 82 ---- 46 files changed, 2565 deletions(-) rename tests/functional/{shared_tests => adapter}/__init__.py (100%) rename tests/functional/{shared_tests => adapter}/test_aliases.py (100%) rename tests/functional/{shared_tests => adapter}/test_basic.py (100%) rename tests/functional/{shared_tests => adapter}/test_caching.py (100%) rename tests/functional/{shared_tests => adapter}/test_clone.py (100%) rename tests/functional/{shared_tests => adapter}/test_column_types.py (100%) rename tests/functional/{shared_tests => adapter}/test_concurrency.py (100%) rename tests/functional/{shared_tests => adapter}/test_constraints.py (100%) rename tests/functional/{shared_tests => adapter}/test_data_types.py (100%) rename tests/functional/{shared_tests => adapter}/test_debug.py (100%) rename tests/functional/{shared_tests => adapter}/test_empty.py (100%) rename tests/functional/{shared_tests => adapter}/test_ephemeral.py (100%) rename tests/functional/{shared_tests => adapter}/test_grants.py (100%) rename tests/functional/{shared_tests => adapter}/test_hooks/data/seed_model.sql (100%) rename tests/functional/{shared_tests => adapter}/test_hooks/data/seed_run.sql (100%) rename tests/functional/{shared_tests => adapter}/test_hooks/test_hooks.py (100%) rename tests/functional/{shared_tests => adapter}/test_incremental.py (100%) rename tests/functional/{shared_tests => adapter}/test_persist_docs.py (100%) rename tests/functional/{shared_tests => adapter}/test_query_comment.py (100%) rename tests/functional/{shared_tests => adapter}/test_relations.py (100%) rename tests/functional/{shared_tests => adapter}/test_show.py (100%) rename tests/functional/{shared_tests => adapter}/test_simple_copy.py (100%) rename tests/functional/{shared_tests => adapter}/test_simple_seed/seed_bom.csv (100%) rename tests/functional/{shared_tests => adapter}/test_simple_seed/test_simple_seed.py (100%) rename tests/functional/{shared_tests => adapter}/test_simple_snapshot.py (100%) rename tests/functional/{shared_tests => adapter}/test_store_test_failures.py (100%) rename tests/functional/{shared_tests => adapter}/test_unit_testing.py (100%) rename tests/functional/{shared_tests => adapter}/test_utils.py (100%) delete mode 100644 tests/functional/simple_snapshot/data/invalidate_postgres.sql delete mode 100644 tests/functional/simple_snapshot/data/seed_pg.sql delete mode 100644 tests/functional/simple_snapshot/data/shared_macros.sql delete mode 100644 tests/functional/simple_snapshot/data/update.sql delete mode 100644 tests/functional/simple_snapshot/fixtures.py delete mode 100644 tests/functional/simple_snapshot/test_basic_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_changing_strategy_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_check_cols_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_comment_ending_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_cross_schema_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_hard_delete_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_long_text_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_renamed_source_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_select_exclude_snapshot.py delete mode 100644 tests/functional/simple_snapshot/test_slow_query_snapshot.py diff --git a/tests/functional/shared_tests/__init__.py b/tests/functional/adapter/__init__.py similarity index 100% rename from tests/functional/shared_tests/__init__.py rename to tests/functional/adapter/__init__.py diff --git a/tests/functional/shared_tests/test_aliases.py b/tests/functional/adapter/test_aliases.py similarity index 100% rename from tests/functional/shared_tests/test_aliases.py rename to tests/functional/adapter/test_aliases.py diff --git a/tests/functional/shared_tests/test_basic.py b/tests/functional/adapter/test_basic.py similarity index 100% rename from tests/functional/shared_tests/test_basic.py rename to tests/functional/adapter/test_basic.py diff --git a/tests/functional/shared_tests/test_caching.py b/tests/functional/adapter/test_caching.py similarity index 100% rename from tests/functional/shared_tests/test_caching.py rename to tests/functional/adapter/test_caching.py diff --git a/tests/functional/shared_tests/test_clone.py b/tests/functional/adapter/test_clone.py similarity index 100% rename from tests/functional/shared_tests/test_clone.py rename to tests/functional/adapter/test_clone.py diff --git a/tests/functional/shared_tests/test_column_types.py b/tests/functional/adapter/test_column_types.py similarity index 100% rename from tests/functional/shared_tests/test_column_types.py rename to tests/functional/adapter/test_column_types.py diff --git a/tests/functional/shared_tests/test_concurrency.py b/tests/functional/adapter/test_concurrency.py similarity index 100% rename from tests/functional/shared_tests/test_concurrency.py rename to tests/functional/adapter/test_concurrency.py diff --git a/tests/functional/shared_tests/test_constraints.py b/tests/functional/adapter/test_constraints.py similarity index 100% rename from tests/functional/shared_tests/test_constraints.py rename to tests/functional/adapter/test_constraints.py diff --git a/tests/functional/shared_tests/test_data_types.py b/tests/functional/adapter/test_data_types.py similarity index 100% rename from tests/functional/shared_tests/test_data_types.py rename to tests/functional/adapter/test_data_types.py diff --git a/tests/functional/shared_tests/test_debug.py b/tests/functional/adapter/test_debug.py similarity index 100% rename from tests/functional/shared_tests/test_debug.py rename to tests/functional/adapter/test_debug.py diff --git a/tests/functional/shared_tests/test_empty.py b/tests/functional/adapter/test_empty.py similarity index 100% rename from tests/functional/shared_tests/test_empty.py rename to tests/functional/adapter/test_empty.py diff --git a/tests/functional/shared_tests/test_ephemeral.py b/tests/functional/adapter/test_ephemeral.py similarity index 100% rename from tests/functional/shared_tests/test_ephemeral.py rename to tests/functional/adapter/test_ephemeral.py diff --git a/tests/functional/shared_tests/test_grants.py b/tests/functional/adapter/test_grants.py similarity index 100% rename from tests/functional/shared_tests/test_grants.py rename to tests/functional/adapter/test_grants.py diff --git a/tests/functional/shared_tests/test_hooks/data/seed_model.sql b/tests/functional/adapter/test_hooks/data/seed_model.sql similarity index 100% rename from tests/functional/shared_tests/test_hooks/data/seed_model.sql rename to tests/functional/adapter/test_hooks/data/seed_model.sql diff --git a/tests/functional/shared_tests/test_hooks/data/seed_run.sql b/tests/functional/adapter/test_hooks/data/seed_run.sql similarity index 100% rename from tests/functional/shared_tests/test_hooks/data/seed_run.sql rename to tests/functional/adapter/test_hooks/data/seed_run.sql diff --git a/tests/functional/shared_tests/test_hooks/test_hooks.py b/tests/functional/adapter/test_hooks/test_hooks.py similarity index 100% rename from tests/functional/shared_tests/test_hooks/test_hooks.py rename to tests/functional/adapter/test_hooks/test_hooks.py diff --git a/tests/functional/shared_tests/test_incremental.py b/tests/functional/adapter/test_incremental.py similarity index 100% rename from tests/functional/shared_tests/test_incremental.py rename to tests/functional/adapter/test_incremental.py diff --git a/tests/functional/shared_tests/test_persist_docs.py b/tests/functional/adapter/test_persist_docs.py similarity index 100% rename from tests/functional/shared_tests/test_persist_docs.py rename to tests/functional/adapter/test_persist_docs.py diff --git a/tests/functional/shared_tests/test_query_comment.py b/tests/functional/adapter/test_query_comment.py similarity index 100% rename from tests/functional/shared_tests/test_query_comment.py rename to tests/functional/adapter/test_query_comment.py diff --git a/tests/functional/shared_tests/test_relations.py b/tests/functional/adapter/test_relations.py similarity index 100% rename from tests/functional/shared_tests/test_relations.py rename to tests/functional/adapter/test_relations.py diff --git a/tests/functional/shared_tests/test_show.py b/tests/functional/adapter/test_show.py similarity index 100% rename from tests/functional/shared_tests/test_show.py rename to tests/functional/adapter/test_show.py diff --git a/tests/functional/shared_tests/test_simple_copy.py b/tests/functional/adapter/test_simple_copy.py similarity index 100% rename from tests/functional/shared_tests/test_simple_copy.py rename to tests/functional/adapter/test_simple_copy.py diff --git a/tests/functional/shared_tests/test_simple_seed/seed_bom.csv b/tests/functional/adapter/test_simple_seed/seed_bom.csv similarity index 100% rename from tests/functional/shared_tests/test_simple_seed/seed_bom.csv rename to tests/functional/adapter/test_simple_seed/seed_bom.csv diff --git a/tests/functional/shared_tests/test_simple_seed/test_simple_seed.py b/tests/functional/adapter/test_simple_seed/test_simple_seed.py similarity index 100% rename from tests/functional/shared_tests/test_simple_seed/test_simple_seed.py rename to tests/functional/adapter/test_simple_seed/test_simple_seed.py diff --git a/tests/functional/shared_tests/test_simple_snapshot.py b/tests/functional/adapter/test_simple_snapshot.py similarity index 100% rename from tests/functional/shared_tests/test_simple_snapshot.py rename to tests/functional/adapter/test_simple_snapshot.py diff --git a/tests/functional/shared_tests/test_store_test_failures.py b/tests/functional/adapter/test_store_test_failures.py similarity index 100% rename from tests/functional/shared_tests/test_store_test_failures.py rename to tests/functional/adapter/test_store_test_failures.py diff --git a/tests/functional/shared_tests/test_unit_testing.py b/tests/functional/adapter/test_unit_testing.py similarity index 100% rename from tests/functional/shared_tests/test_unit_testing.py rename to tests/functional/adapter/test_unit_testing.py diff --git a/tests/functional/shared_tests/test_utils.py b/tests/functional/adapter/test_utils.py similarity index 100% rename from tests/functional/shared_tests/test_utils.py rename to tests/functional/adapter/test_utils.py diff --git a/tests/functional/simple_snapshot/data/invalidate_postgres.sql b/tests/functional/simple_snapshot/data/invalidate_postgres.sql deleted file mode 100644 index b0bef3c6c..000000000 --- a/tests/functional/simple_snapshot/data/invalidate_postgres.sql +++ /dev/null @@ -1,27 +0,0 @@ - --- update records 11 - 21. Change email and updated_at field -update {schema}.seed set - updated_at = updated_at + interval '1 hour', - email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end -where id >= 10 and id <= 20; - - --- invalidate records 11 - 21 -update {schema}.snapshot_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_castillo_expected set - dbt_valid_to = "1-updated_at" + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_alvarez_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; - - -update {schema}.snapshot_kelly_expected set - dbt_valid_to = updated_at + interval '1 hour' -where id >= 10 and id <= 20; diff --git a/tests/functional/simple_snapshot/data/seed_pg.sql b/tests/functional/simple_snapshot/data/seed_pg.sql deleted file mode 100644 index a22a2359c..000000000 --- a/tests/functional/simple_snapshot/data/seed_pg.sql +++ /dev/null @@ -1,223 +0,0 @@ - create table {database}.{schema}.seed ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -create table {database}.{schema}.snapshot_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- seed inserts --- use the same email for two users to verify that duplicated check_cols values --- are handled appropriately -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), -(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), -(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), -(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), -(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), -(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), -(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), -(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), -(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), -(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), -(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), -(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), -(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), -(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), -(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), -(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), -(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), -(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), -(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), -(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); - - --- populate snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed; - - - -create table {database}.{schema}.snapshot_castillo_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - "1-updated_at" TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- one entry -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Castillo'; - -create table {database}.{schema}.snapshot_alvarez_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - --- 0 entries -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Alvarez'; - -create table {database}.{schema}.snapshot_kelly_expected ( - id INTEGER, - first_name VARCHAR(50), - last_name VARCHAR(50), - email VARCHAR(50), - gender VARCHAR(50), - ip_address VARCHAR(20), - - -- snapshotting fields - updated_at TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_from TIMESTAMP WITHOUT TIME ZONE, - dbt_valid_to TIMESTAMP WITHOUT TIME ZONE, - dbt_scd_id TEXT, - dbt_updated_at TIMESTAMP WITHOUT TIME ZONE -); - - --- 2 entries -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed where last_name = 'Kelly'; diff --git a/tests/functional/simple_snapshot/data/shared_macros.sql b/tests/functional/simple_snapshot/data/shared_macros.sql deleted file mode 100644 index 9bdfdd264..000000000 --- a/tests/functional/simple_snapshot/data/shared_macros.sql +++ /dev/null @@ -1,80 +0,0 @@ -{% macro get_snapshot_unique_id() -%} - {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} -{%- endmacro %} - -{% macro default__get_snapshot_unique_id() -%} - {% do return("id || '-' || first_name") %} -{%- endmacro %} - -{# - mostly copy+pasted from dbt_utils, but I removed some parameters and added - a query that calls get_snapshot_unique_id -#} -{% test mutually_exclusive_ranges(model) %} - -with base as ( - select {{ get_snapshot_unique_id() }} as dbt_unique_id, - * - from {{ model }} -), -window_functions as ( - - select - dbt_valid_from as lower_bound, - coalesce(dbt_valid_to, '2099-1-1T00:00:01') as upper_bound, - - lead(dbt_valid_from) over ( - partition by dbt_unique_id - order by dbt_valid_from - ) as next_lower_bound, - - row_number() over ( - partition by dbt_unique_id - order by dbt_valid_from desc - ) = 1 as is_last_record - - from base - -), - -calc as ( - -- We want to return records where one of our assumptions fails, so we'll use - -- the `not` function with `and` statements so we can write our assumptions nore cleanly - select - *, - - -- For each record: lower_bound should be < upper_bound. - -- Coalesce it to return an error on the null case (implicit assumption - -- these columns are not_null) - coalesce( - lower_bound < upper_bound, - is_last_record - ) as lower_bound_less_than_upper_bound, - - -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound. - -- Coalesce it to handle null cases for the last record. - coalesce( - upper_bound = next_lower_bound, - is_last_record, - false - ) as upper_bound_equal_to_next_lower_bound - - from window_functions - -), - -validation_errors as ( - - select - * - from calc - - where not( - -- THE FOLLOWING SHOULD BE TRUE -- - lower_bound_less_than_upper_bound - and upper_bound_equal_to_next_lower_bound - ) -) - -select * from validation_errors -{% endtest %} diff --git a/tests/functional/simple_snapshot/data/update.sql b/tests/functional/simple_snapshot/data/update.sql deleted file mode 100644 index 890959f32..000000000 --- a/tests/functional/simple_snapshot/data/update.sql +++ /dev/null @@ -1,261 +0,0 @@ --- insert v2 of the 11 - 21 records - -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20; - - -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Castillo'; - - -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Alvarez'; - - -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id >= 10 and id <= 20 and last_name = 'Kelly'; - --- insert 10 new records -insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values -(21, 'Judy', 'Robinson', 'jrobinsonk@blogs.com', 'Female', '208.21.192.232', '2016-09-18 08:27:38'), -(22, 'Kevin', 'Alvarez', 'kalvarezl@buzzfeed.com', 'Male', '228.106.146.9', '2016-07-29 03:07:37'), -(23, 'Barbara', 'Carr', 'bcarrm@pen.io', 'Female', '106.165.140.17', '2015-09-24 13:27:23'), -(24, 'William', 'Watkins', 'wwatkinsn@guardian.co.uk', 'Male', '78.155.84.6', '2016-03-08 19:13:08'), -(25, 'Judy', 'Cooper', 'jcoopero@google.com.au', 'Female', '24.149.123.184', '2016-10-05 20:49:33'), -(26, 'Shirley', 'Castillo', 'scastillop@samsung.com', 'Female', '129.252.181.12', '2016-06-20 21:12:21'), -(27, 'Justin', 'Harper', 'jharperq@opera.com', 'Male', '131.172.103.218', '2016-05-21 22:56:46'), -(28, 'Marie', 'Medina', 'mmedinar@nhs.uk', 'Female', '188.119.125.67', '2015-10-08 13:44:33'), -(29, 'Kelly', 'Edwards', 'kedwardss@phoca.cz', 'Female', '47.121.157.66', '2015-09-15 06:33:37'), -(30, 'Carl', 'Coleman', 'ccolemant@wikipedia.org', 'Male', '82.227.154.83', '2016-05-26 16:46:40'); - - --- add these new records to the snapshot table -insert into {database}.{schema}.snapshot_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20; - - --- add these new records to the snapshot table -insert into {database}.{schema}.snapshot_castillo_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - "1-updated_at", - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Castillo'; - -insert into {database}.{schema}.snapshot_alvarez_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Alvarez'; - -insert into {database}.{schema}.snapshot_kelly_expected ( - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - dbt_valid_from, - dbt_valid_to, - dbt_updated_at, - dbt_scd_id -) - -select - id, - first_name, - last_name, - email, - gender, - ip_address, - updated_at, - -- fields added by snapshotting - updated_at as dbt_valid_from, - null::timestamp as dbt_valid_to, - updated_at as dbt_updated_at, - md5(id || '-' || first_name || '|' || updated_at::text) as dbt_scd_id -from {database}.{schema}.seed -where id > 20 and last_name = 'Kelly'; diff --git a/tests/functional/simple_snapshot/fixtures.py b/tests/functional/simple_snapshot/fixtures.py deleted file mode 100644 index 04e4905d4..000000000 --- a/tests/functional/simple_snapshot/fixtures.py +++ /dev/null @@ -1,389 +0,0 @@ -snapshots_select__snapshot_sql = """ -{% snapshot snapshot_castillo %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='"1-updated_at"', - ) - }} - select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' - -{% endsnapshot %} - -{% snapshot snapshot_alvarez %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' - -{% endsnapshot %} - - -{% snapshot snapshot_kelly %} - {# This has no target_database set, which is allowed! #} - {{ - config( - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' - -{% endsnapshot %} -""" - -snapshots_pg_custom__snapshot_sql = """ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} -""" - - -macros_custom_snapshot__custom_sql = """ -{# A "custom" strategy that's really just the timestamp one #} -{% macro snapshot_custom_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set primary_key = config['unique_key'] %} - {% set updated_at = config['updated_at'] %} - - {% set row_changed_expr -%} - ({{ snapshotted_rel }}.{{ updated_at }} < {{ current_rel }}.{{ updated_at }}) - {%- endset %} - - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr - }) %} -{% endmacro %} -""" - - -models__schema_yml = """ -version: 2 -snapshots: - - name: snapshot_actual - data_tests: - - mutually_exclusive_ranges - config: - meta: - owner: 'a_owner' -""" - -models__schema_with_target_schema_yml = """ -version: 2 -snapshots: - - name: snapshot_actual - data_tests: - - mutually_exclusive_ranges - config: - meta: - owner: 'a_owner' - target_schema: schema_from_schema_yml -""" - -models__ref_snapshot_sql = """ -select * from {{ ref('snapshot_actual') }} -""" - -macros__test_no_overlaps_sql = """ -{% macro get_snapshot_unique_id() -%} - {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} -{%- endmacro %} - -{% macro default__get_snapshot_unique_id() -%} - {% do return("id || '-' || first_name") %} -{%- endmacro %} - -{# - mostly copy+pasted from dbt_utils, but I removed some parameters and added - a query that calls get_snapshot_unique_id -#} -{% test mutually_exclusive_ranges(model) %} - -with base as ( - select {{ get_snapshot_unique_id() }} as dbt_unique_id, - * - from {{ model }} -), -window_functions as ( - - select - dbt_valid_from as lower_bound, - coalesce(dbt_valid_to, '2099-1-1T00:00:01') as upper_bound, - - lead(dbt_valid_from) over ( - partition by dbt_unique_id - order by dbt_valid_from - ) as next_lower_bound, - - row_number() over ( - partition by dbt_unique_id - order by dbt_valid_from desc - ) = 1 as is_last_record - - from base - -), - -calc as ( - -- We want to return records where one of our assumptions fails, so we'll use - -- the `not` function with `and` statements so we can write our assumptions nore cleanly - select - *, - - -- For each record: lower_bound should be < upper_bound. - -- Coalesce it to return an error on the null case (implicit assumption - -- these columns are not_null) - coalesce( - lower_bound < upper_bound, - is_last_record - ) as lower_bound_less_than_upper_bound, - - -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound. - -- Coalesce it to handle null cases for the last record. - coalesce( - upper_bound = next_lower_bound, - is_last_record, - false - ) as upper_bound_equal_to_next_lower_bound - - from window_functions - -), - -validation_errors as ( - - select - * - from calc - - where not( - -- THE FOLLOWING SHOULD BE TRUE -- - lower_bound_less_than_upper_bound - and upper_bound_equal_to_next_lower_bound - ) -) - -select * from validation_errors -{% endtest %} -""" - - -snapshots_select_noconfig__snapshot_sql = """ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} - -{% snapshot snapshot_castillo %} - - {{ - config( - target_database=var('target_database', database), - updated_at='"1-updated_at"', - ) - }} - select id,first_name,last_name,email,gender,ip_address,updated_at as "1-updated_at" from {{target.database}}.{{schema}}.seed where last_name = 'Castillo' - -{% endsnapshot %} - -{% snapshot snapshot_alvarez %} - - {{ - config( - target_database=var('target_database', database), - ) - }} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Alvarez' - -{% endsnapshot %} - - -{% snapshot snapshot_kelly %} - {# This has no target_database set, which is allowed! #} - select * from {{target.database}}.{{schema}}.seed where last_name = 'Kelly' - -{% endsnapshot %} -""" - - -seeds__seed_newcol_csv = """id,first_name,last_name -1,Judith,Kennedy -2,Arthur,Kelly -3,Rachel,Moreno -""" - -seeds__seed_csv = """id,first_name -1,Judith -2,Arthur -3,Rachel -""" - - -snapshots_pg_custom_namespaced__snapshot_sql = """ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='test.custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} -""" - -snapshots_pg__snapshot_sql = """ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - - {% if var('invalidate_hard_deletes', 'false') | as_bool %} - {{ config(invalidate_hard_deletes=True) }} - {% endif %} - - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} -""" - -snapshots_pg__snapshot_no_target_schema_sql = """ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', - updated_at='updated_at', - ) - }} - - {% if var('invalidate_hard_deletes', 'false') | as_bool %} - {{ config(invalidate_hard_deletes=True) }} - {% endif %} - - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} -""" - -models_slow__gen_sql = """ - -{{ config(materialized='ephemeral') }} - - -/* - Generates 50 rows that "appear" to update every - second to a query-er. - - 1 2020-04-21 20:44:00-04 0 - 2 2020-04-21 20:43:59-04 59 - 3 2020-04-21 20:43:58-04 58 - 4 2020-04-21 20:43:57-04 57 - - .... 1 second later .... - - 1 2020-04-21 20:44:01-04 1 - 2 2020-04-21 20:44:00-04 0 - 3 2020-04-21 20:43:59-04 59 - 4 2020-04-21 20:43:58-04 58 - - This view uses pg_sleep(2) to make queries against - the view take a non-trivial amount of time - - Use statement_timestamp() as it changes during a transactions. - If we used now() or current_time or similar, then the timestamp - of the start of the transaction would be returned instead. -*/ - -with gen as ( - - select - id, - date_trunc('second', statement_timestamp()) - (interval '1 second' * id) as updated_at - - from generate_series(1, 10) id - -) - -select - id, - updated_at, - extract(seconds from updated_at)::int as seconds - -from gen, pg_sleep(2) -""" - -snapshots_longtext__snapshot_sql = """ -{% snapshot snapshot_actual %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.super_long -{% endsnapshot %} -""" - -snapshots_check_col_noconfig__snapshot_sql = """ -{% snapshot snapshot_actual %} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ config(check_cols='all') }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} -""" diff --git a/tests/functional/simple_snapshot/test_basic_snapshot.py b/tests/functional/simple_snapshot/test_basic_snapshot.py deleted file mode 100644 index 6165e8e18..000000000 --- a/tests/functional/simple_snapshot/test_basic_snapshot.py +++ /dev/null @@ -1,373 +0,0 @@ -from datetime import datetime -import os - -from dbt.tests.util import ( - check_relations_equal, - relation_from_name, - run_dbt, - write_file, -) -import pytest -import pytz - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - macros_custom_snapshot__custom_sql, - models__ref_snapshot_sql, - models__schema_with_target_schema_yml, - models__schema_yml, - seeds__seed_csv, - seeds__seed_newcol_csv, - snapshots_pg__snapshot_no_target_schema_sql, - snapshots_pg__snapshot_sql, - snapshots_pg_custom__snapshot_sql, - snapshots_pg_custom_namespaced__snapshot_sql, -) - - -snapshots_check_col__snapshot_sql = """ -{% snapshot snapshot_actual %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='check', - check_cols=['email'], - ) - }} - select * from {{target.database}}.{{schema}}.seed - -{% endsnapshot %} - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='check', - check_cols='all', - ) - }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} -""" - - -snapshots_check_col_noconfig__snapshot_sql = """ -{% snapshot snapshot_actual %} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} - -{# This should be exactly the same #} -{% snapshot snapshot_checkall %} - {{ config(check_cols='all') }} - select * from {{target.database}}.{{schema}}.seed -{% endsnapshot %} -""" - - -def snapshot_setup(project, num_snapshot_models=1): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - results = run_dbt(["snapshot"]) - assert len(results) == num_snapshot_models - - run_dbt(["test"]) - check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) - - path = os.path.join(project.test_data_dir, "invalidate_postgres.sql") - project.run_sql_file(path) - - path = os.path.join(project.test_data_dir, "update.sql") - project.run_sql_file(path) - - results = run_dbt(["snapshot"]) - assert len(results) == num_snapshot_models - - run_dbt(["test"]) - check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) - - -def ref_setup(project, num_snapshot_models=1): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - results = run_dbt(["snapshot"]) - assert len(results) == num_snapshot_models - - results = run_dbt(["run"]) - assert len(results) == 1 - - -class Basic: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_pg__snapshot_sql} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - -class TestBasicSnapshot(Basic): - def test_basic_snapshot(self, project): - snapshot_setup(project, num_snapshot_models=1) - - -class TestBasicRef(Basic): - def test_basic_ref(self, project): - ref_setup(project, num_snapshot_models=1) - - -class TestBasicTargetSchemaConfig(Basic): - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_pg__snapshot_no_target_schema_sql} - - @pytest.fixture(scope="class") - def project_config_update(self, unique_schema): - return { - "snapshots": { - "test": { - "target_schema": unique_schema + "_alt", - } - } - } - - def test_target_schema(self, project): - manifest = run_dbt(["parse"]) - assert len(manifest.nodes) == 5 - # ensure that the schema in the snapshot node is the same as target_schema - snapshot_id = "snapshot.test.snapshot_actual" - snapshot_node = manifest.nodes[snapshot_id] - assert snapshot_node.schema == f"{project.test_schema}_alt" - assert ( - snapshot_node.relation_name - == f'"{project.database}"."{project.test_schema}_alt"."snapshot_actual"' - ) - assert snapshot_node.meta == {"owner": "a_owner"} - - # write out schema.yml file and check again - write_file(models__schema_with_target_schema_yml, "models", "schema.yml") - manifest = run_dbt(["parse"]) - snapshot_node = manifest.nodes[snapshot_id] - assert snapshot_node.schema == "schema_from_schema_yml" - - -class CustomNamespace: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_pg_custom_namespaced__snapshot_sql} - - @pytest.fixture(scope="class") - def macros(self): - return { - "test_no_overlaps.sql": macros__test_no_overlaps_sql, - "custom.sql": macros_custom_snapshot__custom_sql, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - -class TestBasicCustomNamespace(CustomNamespace): - def test_custom_namespace_snapshot(self, project): - snapshot_setup(project, num_snapshot_models=1) - - -class TestRefCustomNamespace(CustomNamespace): - def test_custom_namespace_ref(self, project): - ref_setup(project, num_snapshot_models=1) - - -class CustomSnapshot: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_pg_custom__snapshot_sql} - - @pytest.fixture(scope="class") - def macros(self): - return { - "test_no_overlaps.sql": macros__test_no_overlaps_sql, - "custom.sql": macros_custom_snapshot__custom_sql, - } - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - -class TestBasicCustomSnapshot(CustomSnapshot): - def test_custom_snapshot(self, project): - snapshot_setup(project, num_snapshot_models=1) - - -class TestRefCustomSnapshot(CustomSnapshot): - def test_custom_ref(self, project): - ref_setup(project, num_snapshot_models=1) - - -class CheckCols: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_check_col__snapshot_sql} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - -class TestBasicCheckCols(CheckCols): - def test_basic_snapshot(self, project): - snapshot_setup(project, num_snapshot_models=2) - - -class TestRefCheckCols(CheckCols): - def test_check_cols_ref(self, project): - ref_setup(project, num_snapshot_models=2) - - -class ConfiguredCheckCols: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_check_col_noconfig__snapshot_sql} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - @pytest.fixture(scope="class") - def project_config_update(self): - snapshot_config = { - "snapshots": { - "test": { - "target_schema": "{{ target.schema }}", - "unique_key": "id || '-' || first_name", - "strategy": "check", - "check_cols": ["email"], - } - } - } - return snapshot_config - - -class TestBasicConfiguredCheckCols(ConfiguredCheckCols): - def test_configured_snapshot(self, project): - snapshot_setup(project, num_snapshot_models=2) - - -class TestRefConfiguredCheckCols(ConfiguredCheckCols): - def test_configured_ref(self, project): - ref_setup(project, num_snapshot_models=2) - - -class UpdatedAtCheckCols: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_check_col_noconfig__snapshot_sql} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - @pytest.fixture(scope="class") - def project_config_update(self): - snapshot_config = { - "snapshots": { - "test": { - "target_schema": "{{ target.schema }}", - "unique_key": "id || '-' || first_name", - "strategy": "check", - "check_cols": "all", - "updated_at": "updated_at", - } - } - } - return snapshot_config - - -class TestBasicUpdatedAtCheckCols(UpdatedAtCheckCols): - def test_updated_at_snapshot(self, project): - snapshot_setup(project, num_snapshot_models=2) - - snapshot_expected_relation = relation_from_name(project.adapter, "snapshot_expected") - revived_records = project.run_sql( - """ - select id, updated_at, dbt_valid_from from {} - """.format( - snapshot_expected_relation - ), - fetch="all", - ) - for result in revived_records: - # result is a tuple, the updated_at is second and dbt_valid_from is latest - assert isinstance(result[1], datetime) - assert isinstance(result[2], datetime) - assert result[1].replace(tzinfo=pytz.UTC) == result[2].replace(tzinfo=pytz.UTC) - - -class TestRefUpdatedAtCheckCols(UpdatedAtCheckCols): - def test_updated_at_ref(self, project): - ref_setup(project, num_snapshot_models=2) diff --git a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py deleted file mode 100644 index d5333536f..000000000 --- a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py +++ /dev/null @@ -1,127 +0,0 @@ -from dbt.tests.util import check_relations_equal, run_dbt -import pytest - - -snapshot_sql = """ -{% snapshot snapshot_check_cols_new_column %} - {{ - config( - target_database=database, - target_schema=schema, - strategy='check', - unique_key='id', - check_cols=var("check_cols", ['name']), - updated_at="'" ~ var("updated_at") ~ "'::timestamp", - ) - }} - - {% if var('version') == 1 %} - - select 1 as id, 'foo' as name - - {% else %} - - select 1 as id, 'foo' as name, 'bar' as other - - {% endif %} - -{% endsnapshot %} -""" - -expected_csv = """ -id,name,other,dbt_scd_id,dbt_updated_at,dbt_valid_from,dbt_valid_to -1,foo,NULL,0d73ad1b216ad884c9f7395d799c912c,2016-07-01 00:00:00.000,2016-07-01 00:00:00.000,2016-07-02 00:00:00.000 -1,foo,bar,7df3783934a6a707d51254859260b9ff,2016-07-02 00:00:00.000,2016-07-02 00:00:00.000, -""".lstrip() - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot_check_cols_new_column.sql": snapshot_sql} - - -@pytest.fixture(scope="class") -def seeds(): - return {"snapshot_check_cols_new_column_expected.csv": expected_csv} - - -@pytest.fixture(scope="class") -def project_config_update(): - return { - "seeds": { - "quote_columns": False, - "test": { - "snapshot_check_cols_new_column_expected": { - "+column_types": { - "dbt_updated_at": "timestamp without time zone", - "dbt_valid_from": "timestamp without time zone", - "dbt_valid_to": "timestamp without time zone", - }, - }, - }, - }, - } - - -def run_check_cols_snapshot_with_schema_change(project, check_cols_override=None): - """ - Test that snapshots using the "check" strategy and explicit check_cols support adding columns. - - Approach: - 1. Take a snapshot that checks a single non-id column - 2. Add a new column to the data - 3. Take a snapshot that checks the new non-id column too - - As long as no error is thrown, then the snapshot was successful - """ - - check_cols = check_cols_override or ["name", "other"] - - # 1. Create a table that represents the expected data after a series of snapshots - vars_dict = {"version": 1, "updated_at": "2016-07-01"} - results = run_dbt(["seed", "--show", "--vars", str(vars_dict)]) - assert len(results) == 1 - - # Snapshot 1 - # Use only 'name' for check_cols - vars_dict = {"version": 1, "check_cols": [check_cols[0]], "updated_at": "2016-07-01"} - results = run_dbt(["snapshot", "--vars", str(vars_dict)]) - assert len(results) == 1 - - # Snapshot 2 - # Use both 'name' and 'other' for check_cols - vars_dict = {"version": 2, "check_cols": check_cols, "updated_at": "2016-07-02"} - results = run_dbt(["snapshot", "--vars", str(vars_dict)]) - assert len(results) == 1 - - check_relations_equal( - project.adapter, - ["snapshot_check_cols_new_column", "snapshot_check_cols_new_column_expected"], - compare_snapshot_cols=True, - ) - - # Snapshot 3 - # Run it again. Nothing has changed — ensure we don't detect changes - vars_dict = {"version": 2, "check_cols": check_cols, "updated_at": "2016-07-02"} - results = run_dbt(["snapshot", "--vars", str(vars_dict)]) - assert len(results) == 1 - - check_relations_equal( - project.adapter, - ["snapshot_check_cols_new_column", "snapshot_check_cols_new_column_expected"], - compare_snapshot_cols=True, - ) - - -def test_check_cols_snapshot_with_schema_change(project): - run_check_cols_snapshot_with_schema_change(project) - - -def test_check_cols_snapshot_with_schema_change_and_mismatched_casing(project): - """ - Test that this still works if the database-stored version of 'name' + 'other' - differs from the user-configured 'NAME' and 'OTHER' - """ - run_check_cols_snapshot_with_schema_change( - project=project, check_cols_override=["NAME", "OTHER"] - ) diff --git a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py b/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py deleted file mode 100644 index 5540eee5e..000000000 --- a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py +++ /dev/null @@ -1,128 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql - - -test_snapshots_changing_strategy__test_snapshot_sql = """ - -{# /* - Given the repro case for the snapshot build, we'd - expect to see both records have color='pink' - in their most recent rows. -*/ #} - -with expected as ( - - select 1 as id, 'pink' as color union all - select 2 as id, 'pink' as color - -), - -actual as ( - - select id, color - from {{ ref('my_snapshot') }} - where color = 'pink' - and dbt_valid_to is null - -) - -select * from expected -except -select * from actual - -union all - -select * from actual -except -select * from expected -""" - - -snapshots_changing_strategy__snapshot_sql = """ - -{# - REPRO: - 1. Run with check strategy - 2. Add a new ts column and run with check strategy - 3. Run with timestamp strategy on new ts column - - Expect: new entry is added for changed rows in (3) -#} - - -{% snapshot my_snapshot %} - - {#--------------- Configuration ------------ #} - - {{ config( - target_schema=schema, - unique_key='id' - ) }} - - {% if var('strategy') == 'timestamp' %} - {{ config(strategy='timestamp', updated_at='updated_at') }} - {% else %} - {{ config(strategy='check', check_cols=['color']) }} - {% endif %} - - {#--------------- Test setup ------------ #} - - {% if var('step') == 1 %} - - select 1 as id, 'blue' as color - union all - select 2 as id, 'red' as color - - {% elif var('step') == 2 %} - - -- change id=1 color from blue to green - -- id=2 is unchanged when using the check strategy - select 1 as id, 'green' as color, '2020-01-01'::date as updated_at - union all - select 2 as id, 'red' as color, '2020-01-01'::date as updated_at - - {% elif var('step') == 3 %} - - -- bump timestamp for both records. Expect that after this runs - -- using the timestamp strategy, both ids should have the color - -- 'pink' in the database. This should be in the future b/c we're - -- going to compare to the check timestamp, which will be _now_ - select 1 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at - union all - select 2 as id, 'pink' as color, (now() + interval '1 day')::date as updated_at - - {% endif %} - -{% endsnapshot %} -""" - - -@pytest.fixture(scope="class") -def models(): - return {"gen.sql": models_slow__gen_sql} - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_changing_strategy__snapshot_sql} - - -@pytest.fixture(scope="class") -def tests(): - return {"test_snapshot.sql": test_snapshots_changing_strategy__test_snapshot_sql} - - -def test_changing_strategy(project): - results = run_dbt(["snapshot", "--vars", "{strategy: check, step: 1}"]) - assert len(results) == 1 - - results = run_dbt(["snapshot", "--vars", "{strategy: check, step: 2}"]) - assert len(results) == 1 - - results = run_dbt(["snapshot", "--vars", "{strategy: timestamp, step: 3}"]) - assert len(results) == 1 - - results = run_dbt(["test"]) - assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_check_cols_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_snapshot.py deleted file mode 100644 index 2b2673dff..000000000 --- a/tests/functional/simple_snapshot/test_check_cols_snapshot.py +++ /dev/null @@ -1,113 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - - -snapshot_sql = """ -{% snapshot check_cols_cycle %} - - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['color'] - ) - }} - - {% if var('version') == 1 %} - - select 1 as id, 'red' as color union all - select 2 as id, 'green' as color - - {% elif var('version') == 2 %} - - select 1 as id, 'blue' as color union all - select 2 as id, 'green' as color - - {% elif var('version') == 3 %} - - select 1 as id, 'red' as color union all - select 2 as id, 'pink' as color - - {% else %} - {% do exceptions.raise_compiler_error("Got bad version: " ~ var('version')) %} - {% endif %} - -{% endsnapshot %} -""" - -snapshot_test_sql = """ -with query as ( - - -- check that the current value for id=1 is red - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 1 and color = 'red' and dbt_valid_to is null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that the previous 'red' value for id=1 is invalidated - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 1 and color = 'red' and dbt_valid_to is not null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that there's only one current record for id=2 - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 2 and color = 'pink' and dbt_valid_to is null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that the previous value for id=2 is represented - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - where id = 2 and color = 'green' and dbt_valid_to is not null - ) = 1 then 0 else 1 end as failures - - union all - - -- check that there are 5 records total in the table - select case when ( - select count(*) - from {{ ref('check_cols_cycle') }} - ) = 5 then 0 else 1 end as failures - -) - -select * -from query -where failures = 1 -""" - - -@pytest.fixture(scope="class") -def snapshots(): - return {"my_snapshot.sql": snapshot_sql} - - -@pytest.fixture(scope="class") -def tests(): - return {"my_test.sql": snapshot_test_sql} - - -def test_simple_snapshot(project): - results = run_dbt(["snapshot", "--vars", "version: 1"]) - assert len(results) == 1 - - results = run_dbt(["snapshot", "--vars", "version: 2"]) - assert len(results) == 1 - - results = run_dbt(["snapshot", "--vars", "version: 3"]) - assert len(results) == 1 - - run_dbt(["test", "--select", "test_type:singular", "--vars", "version: 3"]) diff --git a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py b/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py deleted file mode 100644 index 0c99d85e4..000000000 --- a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py +++ /dev/null @@ -1,114 +0,0 @@ -from dbt.tests.util import check_relations_equal, run_dbt -import pytest - - -snapshot_sql = """ -{% snapshot snapshot_check_cols_updated_at_actual %} - {{ - config( - target_database=database, - target_schema=schema, - unique_key='id', - strategy='check', - check_cols='all', - updated_at="'" ~ var("updated_at") ~ "'::timestamp", - ) - }} - - {% if var('version') == 1 %} - - select 'a' as id, 10 as counter, '2016-01-01T00:00:00Z'::timestamp as timestamp_col union all - select 'b' as id, 20 as counter, '2016-01-01T00:00:00Z'::timestamp as timestamp_col - - {% elif var('version') == 2 %} - - select 'a' as id, 30 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col union all - select 'b' as id, 20 as counter, '2016-01-01T00:00:00Z'::timestamp as timestamp_col union all - select 'c' as id, 40 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col - - {% else %} - - select 'a' as id, 30 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col union all - select 'c' as id, 40 as counter, '2016-01-02T00:00:00Z'::timestamp as timestamp_col - - {% endif %} - -{% endsnapshot %} -""" - -expected_csv = """ -id,counter,timestamp_col,dbt_scd_id,dbt_updated_at,dbt_valid_from,dbt_valid_to -a,10,2016-01-01 00:00:00.000,927354aa091feffd9437ead0bdae7ae1,2016-07-01 00:00:00.000,2016-07-01 00:00:00.000,2016-07-02 00:00:00.000 -b,20,2016-01-01 00:00:00.000,40ace4cbf8629f1720ec8a529ed76f8c,2016-07-01 00:00:00.000,2016-07-01 00:00:00.000, -a,30,2016-01-02 00:00:00.000,e9133f2b302c50e36f43e770944cec9b,2016-07-02 00:00:00.000,2016-07-02 00:00:00.000, -c,40,2016-01-02 00:00:00.000,09d33d35101e788c152f65d0530b6837,2016-07-02 00:00:00.000,2016-07-02 00:00:00.000, -""".lstrip() - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot_check_cols_updated_at_actual.sql": snapshot_sql} - - -@pytest.fixture(scope="class") -def seeds(): - return {"snapshot_check_cols_updated_at_expected.csv": expected_csv} - - -@pytest.fixture(scope="class") -def project_config_update(): - return { - "seeds": { - "quote_columns": False, - "test": { - "snapshot_check_cols_updated_at_expected": { - "+column_types": { - "timestamp_col": "timestamp without time zone", - "dbt_updated_at": "timestamp without time zone", - "dbt_valid_from": "timestamp without time zone", - "dbt_valid_to": "timestamp without time zone", - }, - }, - }, - }, - } - - -def test_simple_snapshot(project): - """ - Test that the `dbt_updated_at` column reflects the `updated_at` timestamp expression in the config. - - Approach: - 1. Create a table that represents the expected data after a series of snapshots - - Use dbt seed to create the expected relation (`snapshot_check_cols_updated_at_expected`) - 2. Execute a series of snapshots to create the data - - Use a series of (3) dbt snapshot commands to create the actual relation (`snapshot_check_cols_updated_at_actual`) - - The logic can switch between 3 different versions of the data (depending on the `version` number) - - The `updated_at` value is passed in via `--vars` and cast to a timestamp in the snapshot config - 3. Compare the two relations for equality - """ - - # 1. Create a table that represents the expected data after a series of snapshots - results = run_dbt(["seed", "--show", "--vars", "{version: 1, updated_at: 2016-07-01}"]) - assert len(results) == 1 - - # 2. Execute a series of snapshots to create the data - - # Snapshot day 1 - results = run_dbt(["snapshot", "--vars", "{version: 1, updated_at: 2016-07-01}"]) - assert len(results) == 1 - - # Snapshot day 2 - results = run_dbt(["snapshot", "--vars", "{version: 2, updated_at: 2016-07-02}"]) - assert len(results) == 1 - - # Snapshot day 3 - results = run_dbt(["snapshot", "--vars", "{version: 3, updated_at: 2016-07-03}"]) - assert len(results) == 1 - - # 3. Compare the two relations for equality - check_relations_equal( - project.adapter, - ["snapshot_check_cols_updated_at_actual", "snapshot_check_cols_updated_at_expected"], - compare_snapshot_cols=True, - ) diff --git a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py b/tests/functional/simple_snapshot/test_comment_ending_snapshot.py deleted file mode 100644 index ab21b641b..000000000 --- a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py +++ /dev/null @@ -1,36 +0,0 @@ -import os - -from dbt.tests.util import run_dbt -import pytest - - -snapshots_with_comment_at_end__snapshot_sql = """ -{% snapshot snapshot_actual %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='check', - check_cols=['email'], - ) - }} - select * from {{target.database}}.{{schema}}.seed - -- Test comment to prevent recurrence of https://github.com/dbt-labs/dbt-core/issues/6781 -{% endsnapshot %} -""" - - -class TestSnapshotsWithCommentAtEnd: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_with_comment_at_end__snapshot_sql} - - def test_comment_ending(self, project): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - # N.B. Snapshot is run twice to ensure snapshot_check_all_get_existing_columns is fully run - # (it exits early if the table doesn't already exist) - run_dbt(["snapshot"]) - results = run_dbt(["snapshot"]) - assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py b/tests/functional/simple_snapshot/test_cross_schema_snapshot.py deleted file mode 100644 index 1072a5aa8..000000000 --- a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py +++ /dev/null @@ -1,48 +0,0 @@ -import os - -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - models__ref_snapshot_sql, - models__schema_yml, - snapshots_pg__snapshot_sql, -) - - -NUM_SNAPSHOT_MODELS = 1 - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_pg__snapshot_sql} - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - -@pytest.fixture(scope="class") -def macros(): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - -def test_cross_schema_snapshot(project): - # populate seed and snapshot tables - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - - target_schema = "{}_snapshotted".format(project.test_schema) - - # create a snapshot using the new schema - results = run_dbt(["snapshot", "--vars", '{{"target_schema": "{}"}}'.format(target_schema)]) - assert len(results) == NUM_SNAPSHOT_MODELS - - # run dbt from test_schema with a ref to to new target_schema - results = run_dbt(["run", "--vars", '{{"target_schema": {}}}'.format(target_schema)]) - assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py b/tests/functional/simple_snapshot/test_hard_delete_snapshot.py deleted file mode 100644 index ab25bbfab..000000000 --- a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py +++ /dev/null @@ -1,192 +0,0 @@ -from datetime import datetime, timedelta -import os - -from dbt.tests.adapter.utils.test_current_timestamp import is_aware -from dbt.tests.util import run_dbt, check_relations_equal -import pytest -import pytz - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - models__ref_snapshot_sql, - models__schema_yml, - snapshots_pg__snapshot_sql, -) - - -# These tests uses the same seed data, containing 20 records of which we hard delete the last 10. -# These deleted records set the dbt_valid_to to time the snapshot was ran. - - -def convert_to_aware(d: datetime) -> datetime: - # There are two types of datetime objects in Python: naive and aware - # Assume any dbt snapshot timestamp that is naive is meant to represent UTC - if d is None: - return d - elif is_aware(d): - return d - else: - return d.replace(tzinfo=pytz.UTC) - - -def is_close_datetime( - dt1: datetime, dt2: datetime, atol: timedelta = timedelta(microseconds=1) -) -> bool: - # Similar to pytest.approx, math.isclose, and numpy.isclose - # Use an absolute tolerance to compare datetimes that may not be perfectly equal. - # Two None values will compare as equal. - if dt1 is None and dt2 is None: - return True - elif dt1 is not None and dt2 is not None: - return (dt1 > (dt2 - atol)) and (dt1 < (dt2 + atol)) - else: - return False - - -def datetime_snapshot(): - NUM_SNAPSHOT_MODELS = 1 - begin_snapshot_datetime = datetime.now(pytz.UTC) - results = run_dbt(["snapshot", "--vars", "{invalidate_hard_deletes: true}"]) - assert len(results) == NUM_SNAPSHOT_MODELS - - return begin_snapshot_datetime - - -@pytest.fixture(scope="class", autouse=True) -def setUp(project): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_pg__snapshot_sql} - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - -@pytest.fixture(scope="class") -def macros(): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - -def test_snapshot_hard_delete(project): - # run the first snapshot - datetime_snapshot() - - check_relations_equal(project.adapter, ["snapshot_expected", "snapshot_actual"]) - - invalidated_snapshot_datetime = None - revived_snapshot_datetime = None - - # hard delete last 10 records - project.run_sql( - "delete from {}.{}.seed where id >= 10;".format(project.database, project.test_schema) - ) - - # snapshot and assert invalidated - invalidated_snapshot_datetime = datetime_snapshot() - - snapshotted = project.run_sql( - """ - select - id, - dbt_valid_to - from {}.{}.snapshot_actual - order by id - """.format( - project.database, project.test_schema - ), - fetch="all", - ) - - assert len(snapshotted) == 20 - for result in snapshotted[10:]: - # result is a tuple, the dbt_valid_to column is the latest - assert isinstance(result[-1], datetime) - dbt_valid_to = convert_to_aware(result[-1]) - - # Plenty of wiggle room if clocks aren't perfectly sync'd, etc - assert is_close_datetime( - dbt_valid_to, invalidated_snapshot_datetime, timedelta(minutes=1) - ), f"SQL timestamp {dbt_valid_to.isoformat()} is not close enough to Python UTC {invalidated_snapshot_datetime.isoformat()}" - - # revive records - # Timestamp must have microseconds for tests below to be meaningful - # Assume `updated_at` is TIMESTAMP WITHOUT TIME ZONE that implicitly represents UTC - revival_timestamp = datetime.now(pytz.UTC).strftime("%Y-%m-%d %H:%M:%S.%f") - project.run_sql( - """ - insert into {}.{}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values - (10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '{}'), - (11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '{}') - """.format( - project.database, project.test_schema, revival_timestamp, revival_timestamp - ) - ) - - # snapshot and assert records were revived - # Note: the revived_snapshot_datetime here is later than the revival_timestamp above - revived_snapshot_datetime = datetime_snapshot() - - # records which weren't revived (id != 10, 11) - # dbt_valid_to is not null - invalidated_records = project.run_sql( - """ - select - id, - dbt_valid_to - from {}.{}.snapshot_actual - where dbt_valid_to is not null - order by id - """.format( - project.database, project.test_schema - ), - fetch="all", - ) - - assert len(invalidated_records) == 11 - for result in invalidated_records: - # result is a tuple, the dbt_valid_to column is the latest - assert isinstance(result[1], datetime) - dbt_valid_to = convert_to_aware(result[1]) - - # Plenty of wiggle room if clocks aren't perfectly sync'd, etc - assert is_close_datetime( - dbt_valid_to, invalidated_snapshot_datetime, timedelta(minutes=1) - ), f"SQL timestamp {dbt_valid_to.isoformat()} is not close enough to Python UTC {invalidated_snapshot_datetime.isoformat()}" - - # records which were revived (id = 10, 11) - # dbt_valid_to is null - revived_records = project.run_sql( - """ - select - id, - dbt_valid_from, - dbt_valid_to - from {}.{}.snapshot_actual - where dbt_valid_to is null - and id IN (10, 11) - """.format( - project.database, project.test_schema - ), - fetch="all", - ) - - assert len(revived_records) == 2 - for result in revived_records: - # result is a tuple, the dbt_valid_from is second and dbt_valid_to is latest - # dbt_valid_from is the same as the 'updated_at' added in the revived_rows - # dbt_valid_to is null - assert isinstance(result[1], datetime) - dbt_valid_from = convert_to_aware(result[1]) - dbt_valid_to = result[2] - - assert dbt_valid_from <= revived_snapshot_datetime - assert dbt_valid_to is None diff --git a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py b/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py deleted file mode 100644 index 1ee8fa40d..000000000 --- a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py +++ /dev/null @@ -1,67 +0,0 @@ -import os - -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - macros_custom_snapshot__custom_sql, - models__ref_snapshot_sql, - models__schema_yml, - seeds__seed_csv, - seeds__seed_newcol_csv, -) - - -NUM_SNAPSHOT_MODELS = 1 - - -snapshots_pg_custom_invalid__snapshot_sql = """ -{% snapshot snapshot_actual %} - {# this custom strategy does not exist in the 'dbt' package #} - {{ - config( - target_database=var('target_database', database), - target_schema=var('target_schema', schema), - unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='dbt.custom', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{target.schema}}.seed - -{% endsnapshot %} -""" - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshots.sql": snapshots_pg_custom_invalid__snapshot_sql} - - -@pytest.fixture(scope="class") -def macros(): - return { - "test_no_overlaps.sql": macros__test_no_overlaps_sql, - "custom.sql": macros_custom_snapshot__custom_sql, - } - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - -@pytest.fixture(scope="class") -def seeds(): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - -def test_custom_snapshot_invalid_namespace(project): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - results = run_dbt(["snapshot"], expect_pass=False) - assert len(results) == NUM_SNAPSHOT_MODELS diff --git a/tests/functional/simple_snapshot/test_long_text_snapshot.py b/tests/functional/simple_snapshot/test_long_text_snapshot.py deleted file mode 100644 index 0793a3fcc..000000000 --- a/tests/functional/simple_snapshot/test_long_text_snapshot.py +++ /dev/null @@ -1,70 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - models__ref_snapshot_sql, - models__schema_yml, -) - - -seed_longtext_sql = """ -create table {database}.{schema}.super_long ( - id INTEGER, - longstring TEXT, - updated_at TIMESTAMP WITHOUT TIME ZONE -); - -insert into {database}.{schema}.super_long (id, longstring, updated_at) VALUES -(1, 'short', current_timestamp), -(2, repeat('a', 500), current_timestamp); -""" - -snapshots_longtext__snapshot_sql = """ -{% snapshot snapshot_actual %} - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at', - ) - }} - select * from {{target.database}}.{{schema}}.super_long -{% endsnapshot %} -""" - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_longtext__snapshot_sql} - - -@pytest.fixture(scope="class") -def models(): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - -@pytest.fixture(scope="class") -def macros(): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - -def test_long_text(project): - project.run_sql(seed_longtext_sql) - - results = run_dbt(["snapshot"]) - assert len(results) == 1 - - with project.adapter.connection_named("test"): - status, results = project.adapter.execute( - "select * from {}.{}.snapshot_actual".format(project.database, project.test_schema), - fetch=True, - ) - assert len(results) == 2 - got_names = set(r.get("longstring") for r in results) - assert got_names == {"a" * 500, "short"} diff --git a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py b/tests/functional/simple_snapshot/test_renamed_source_snapshot.py deleted file mode 100644 index 23db614bb..000000000 --- a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py +++ /dev/null @@ -1,74 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - macros_custom_snapshot__custom_sql, - seeds__seed_csv, - seeds__seed_newcol_csv, -) - - -snapshots_checkall__snapshot_sql = """ -{% snapshot my_snapshot %} - {{ config(check_cols='all', unique_key='id', strategy='check', target_database=database, target_schema=schema) }} - select * from {{ ref(var('seed_name', 'seed')) }} -{% endsnapshot %} -""" - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_checkall__snapshot_sql} - - -@pytest.fixture(scope="class") -def macros(): - return { - "test_no_overlaps.sql": macros__test_no_overlaps_sql, - "custom.sql": macros_custom_snapshot__custom_sql, - } - - -@pytest.fixture(scope="class") -def seeds(): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - -def test_renamed_source(project): - run_dbt(["seed"]) - run_dbt(["snapshot"]) - database = project.database - results = project.run_sql( - "select * from {}.{}.my_snapshot".format(database, project.test_schema), - fetch="all", - ) - assert len(results) == 3 - for result in results: - assert len(result) == 6 - - # over ride the ref var in the snapshot definition to use a seed with an additional column, last_name - run_dbt(["snapshot", "--vars", "{seed_name: seed_newcol}"]) - results = project.run_sql( - "select * from {}.{}.my_snapshot where last_name is not NULL".format( - database, project.test_schema - ), - fetch="all", - ) - assert len(results) == 3 - - for result in results: - # new column - assert len(result) == 7 - assert result[-1] is not None - - results = project.run_sql( - "select * from {}.{}.my_snapshot where last_name is NULL".format( - database, project.test_schema - ), - fetch="all", - ) - assert len(results) == 3 - for result in results: - # new column - assert len(result) == 7 diff --git a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py b/tests/functional/simple_snapshot/test_select_exclude_snapshot.py deleted file mode 100644 index ac2b4bc92..000000000 --- a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py +++ /dev/null @@ -1,161 +0,0 @@ -import os - -from dbt.tests.util import ( - check_relations_equal, - check_table_does_not_exist, - run_dbt, -) -import pytest - -from tests.functional.simple_snapshot.fixtures import ( - macros__test_no_overlaps_sql, - models__ref_snapshot_sql, - models__schema_yml, - seeds__seed_csv, - seeds__seed_newcol_csv, - snapshots_pg__snapshot_sql, - snapshots_select__snapshot_sql, - snapshots_select_noconfig__snapshot_sql, -) - - -def all_snapshots(project): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - - results = run_dbt(["snapshot"]) - assert len(results) == 4 - - check_relations_equal(project.adapter, ["snapshot_castillo", "snapshot_castillo_expected"]) - check_relations_equal(project.adapter, ["snapshot_alvarez", "snapshot_alvarez_expected"]) - check_relations_equal(project.adapter, ["snapshot_kelly", "snapshot_kelly_expected"]) - check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) - - path = os.path.join(project.test_data_dir, "invalidate_postgres.sql") - project.run_sql_file(path) - - path = os.path.join(project.test_data_dir, "update.sql") - project.run_sql_file(path) - - results = run_dbt(["snapshot"]) - assert len(results) == 4 - check_relations_equal(project.adapter, ["snapshot_castillo", "snapshot_castillo_expected"]) - check_relations_equal(project.adapter, ["snapshot_alvarez", "snapshot_alvarez_expected"]) - check_relations_equal(project.adapter, ["snapshot_kelly", "snapshot_kelly_expected"]) - check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) - - -def exclude_snapshots(project): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - results = run_dbt(["snapshot", "--exclude", "snapshot_castillo"]) - assert len(results) == 3 - - check_table_does_not_exist(project.adapter, "snapshot_castillo") - check_relations_equal(project.adapter, ["snapshot_alvarez", "snapshot_alvarez_expected"]) - check_relations_equal(project.adapter, ["snapshot_kelly", "snapshot_kelly_expected"]) - check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) - - -def select_snapshots(project): - path = os.path.join(project.test_data_dir, "seed_pg.sql") - project.run_sql_file(path) - results = run_dbt(["snapshot", "--select", "snapshot_castillo"]) - assert len(results) == 1 - - check_relations_equal(project.adapter, ["snapshot_castillo", "snapshot_castillo_expected"]) - check_table_does_not_exist(project.adapter, "snapshot_alvarez") - check_table_does_not_exist(project.adapter, "snapshot_kelly") - check_table_does_not_exist(project.adapter, "snapshot_actual") - - -# all of the tests below use one of both of the above tests with -# various combinations of snapshots and macros -class SelectBasicSetup: - @pytest.fixture(scope="class") - def snapshots(self): - return { - "snapshot.sql": snapshots_pg__snapshot_sql, - "snapshot_select.sql": snapshots_select__snapshot_sql, - } - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - -class TestAllBasic(SelectBasicSetup): - def test_all_snapshots(self, project): - all_snapshots(project) - - -class TestExcludeBasic(SelectBasicSetup): - def test_exclude_snapshots(self, project): - exclude_snapshots(project) - - -class TestSelectBasic(SelectBasicSetup): - def test_select_snapshots(self, project): - select_snapshots(project) - - -class SelectConfiguredSetup: - @pytest.fixture(scope="class") - def snapshots(self): - return {"snapshot.sql": snapshots_select_noconfig__snapshot_sql} - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed_newcol.csv": seeds__seed_newcol_csv, "seed.csv": seeds__seed_csv} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "ref_snapshot.sql": models__ref_snapshot_sql, - } - - @pytest.fixture(scope="class") - def macros(self): - return {"test_no_overlaps.sql": macros__test_no_overlaps_sql} - - # TODO: don't have access to project here so this breaks - @pytest.fixture(scope="class") - def project_config_update(self): - snapshot_config = { - "snapshots": { - "test": { - "target_schema": "{{ target.schema }}", - "unique_key": "id || '-' || first_name", - "strategy": "timestamp", - "updated_at": "updated_at", - } - } - } - return snapshot_config - - -class TestConfigured(SelectConfiguredSetup): - def test_all_configured_snapshots(self, project): - all_snapshots(project) - - -class TestConfiguredExclude(SelectConfiguredSetup): - def test_exclude_configured_snapshots(self, project): - exclude_snapshots(project) - - -class TestConfiguredSelect(SelectConfiguredSetup): - def test_select_configured_snapshots(self, project): - select_snapshots(project) diff --git a/tests/functional/simple_snapshot/test_slow_query_snapshot.py b/tests/functional/simple_snapshot/test_slow_query_snapshot.py deleted file mode 100644 index a65b6cb3a..000000000 --- a/tests/functional/simple_snapshot/test_slow_query_snapshot.py +++ /dev/null @@ -1,82 +0,0 @@ -from dbt.tests.util import run_dbt -import pytest - -from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql - - -snapshots_slow__snapshot_sql = """ - -{% snapshot my_slow_snapshot %} - - {{ - config( - target_database=var('target_database', database), - target_schema=schema, - unique_key='id', - strategy='timestamp', - updated_at='updated_at' - ) - }} - - select - id, - updated_at, - seconds - - from {{ ref('gen') }} - -{% endsnapshot %} -""" - - -test_snapshots_slow__test_timestamps_sql = """ - -/* - Assert that the dbt_valid_from of the latest record - is equal to the dbt_valid_to of the previous record -*/ - -with snapshot as ( - - select * from {{ ref('my_slow_snapshot') }} - -) - -select - snap1.id, - snap1.dbt_valid_from as new_valid_from, - snap2.dbt_valid_from as old_valid_from, - snap2.dbt_valid_to as old_valid_to - -from snapshot as snap1 -join snapshot as snap2 on snap1.id = snap2.id -where snap1.dbt_valid_to is null - and snap2.dbt_valid_to is not null - and snap1.dbt_valid_from != snap2.dbt_valid_to -""" - - -@pytest.fixture(scope="class") -def models(): - return {"gen.sql": models_slow__gen_sql} - - -@pytest.fixture(scope="class") -def snapshots(): - return {"snapshot.sql": snapshots_slow__snapshot_sql} - - -@pytest.fixture(scope="class") -def tests(): - return {"test_timestamps.sql": test_snapshots_slow__test_timestamps_sql} - - -def test_slow(project): - results = run_dbt(["snapshot"]) - assert len(results) == 1 - - results = run_dbt(["snapshot"]) - assert len(results) == 1 - - results = run_dbt(["test"]) - assert len(results) == 1 From bc042ce8fa58dad7056dba93f804ca7dfa2df764 Mon Sep 17 00:00:00 2001 From: Peter Webb <peterallenwebb@gmail.com> Date: Tue, 16 Jul 2024 18:17:03 -0400 Subject: [PATCH 084/114] Add record/replay support (#123) --- .../Under the Hood-20240716-172442.yaml | 6 ++++ dbt/adapters/postgres/connections.py | 34 ++++++++++++++----- dbt/adapters/postgres/record/__init__.py | 2 ++ dbt/adapters/postgres/record/cursor/cursor.py | 15 ++++++++ dbt/adapters/postgres/record/cursor/status.py | 21 ++++++++++++ dbt/adapters/postgres/record/handle.py | 12 +++++++ 6 files changed, 81 insertions(+), 9 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20240716-172442.yaml create mode 100644 dbt/adapters/postgres/record/__init__.py create mode 100644 dbt/adapters/postgres/record/cursor/cursor.py create mode 100644 dbt/adapters/postgres/record/cursor/status.py create mode 100644 dbt/adapters/postgres/record/handle.py diff --git a/.changes/unreleased/Under the Hood-20240716-172442.yaml b/.changes/unreleased/Under the Hood-20240716-172442.yaml new file mode 100644 index 000000000..8777edbb7 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240716-172442.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add support for experimental record/replay testing. +time: 2024-07-16T17:24:42.271859-04:00 +custom: + Author: peterallenwebb + Issue: "123" diff --git a/dbt/adapters/postgres/connections.py b/dbt/adapters/postgres/connections.py index 83f269579..e8f0abe55 100644 --- a/dbt/adapters/postgres/connections.py +++ b/dbt/adapters/postgres/connections.py @@ -5,10 +5,12 @@ from dbt.adapters.contracts.connection import AdapterResponse, Credentials from dbt.adapters.events.logging import AdapterLogger from dbt.adapters.events.types import TypeCodeNotFound +from dbt.adapters.postgres.record import PostgresRecordReplayHandle from dbt.adapters.sql import SQLConnectionManager from dbt_common.exceptions import DbtDatabaseError, DbtRuntimeError from dbt_common.events.functions import warn_or_error from dbt_common.helper_types import Port +from dbt_common.record import get_record_mode_from_env, RecorderMode from mashumaro.jsonschema.annotations import Maximum, Minimum import psycopg2 from typing_extensions import Annotated @@ -132,17 +134,31 @@ def open(cls, connection): kwargs["application_name"] = credentials.application_name def connect(): - handle = psycopg2.connect( - dbname=credentials.database, - user=credentials.user, - host=credentials.host, - password=credentials.password, - port=credentials.port, - connect_timeout=credentials.connect_timeout, - **kwargs, - ) + handle = None + + # In replay mode, we won't connect to a real database at all, while + # in record and diff modes we do, but insert an intermediate handle + # object which monitors native connection activity. + rec_mode = get_record_mode_from_env() + if rec_mode != RecorderMode.REPLAY: + handle = psycopg2.connect( + dbname=credentials.database, + user=credentials.user, + host=credentials.host, + password=credentials.password, + port=credentials.port, + connect_timeout=credentials.connect_timeout, + **kwargs, + ) + + if rec_mode is not None: + # If using the record/replay mechanism, regardless of mode, we + # use a wrapper. + handle = PostgresRecordReplayHandle(handle, connection) + if credentials.role: handle.cursor().execute("set role {}".format(credentials.role)) + return handle retryable_exceptions = [ diff --git a/dbt/adapters/postgres/record/__init__.py b/dbt/adapters/postgres/record/__init__.py new file mode 100644 index 000000000..9b8b9b3a3 --- /dev/null +++ b/dbt/adapters/postgres/record/__init__.py @@ -0,0 +1,2 @@ +from dbt.adapters.postgres.record.cursor.cursor import PostgresRecordReplayCursor +from dbt.adapters.postgres.record.handle import PostgresRecordReplayHandle diff --git a/dbt/adapters/postgres/record/cursor/cursor.py b/dbt/adapters/postgres/record/cursor/cursor.py new file mode 100644 index 000000000..a096c9f56 --- /dev/null +++ b/dbt/adapters/postgres/record/cursor/cursor.py @@ -0,0 +1,15 @@ +from dbt_common.record import record_function + +from dbt.adapters.record import RecordReplayCursor + +from dbt.adapters.postgres.record.cursor.status import CursorGetStatusMessageRecord + + +class PostgresRecordReplayCursor(RecordReplayCursor): + """A custom extension of RecordReplayCursor that adds the statusmessage + property which is specific to psycopg.""" + + @property + @record_function(CursorGetStatusMessageRecord, method=True, id_field_name="connection_name") + def statusmessage(self): + return self.native_cursor.statusmessage diff --git a/dbt/adapters/postgres/record/cursor/status.py b/dbt/adapters/postgres/record/cursor/status.py new file mode 100644 index 000000000..1e8d9620c --- /dev/null +++ b/dbt/adapters/postgres/record/cursor/status.py @@ -0,0 +1,21 @@ +import dataclasses +from typing import Optional + +from dbt_common.record import Record, Recorder + + +@dataclasses.dataclass +class CursorGetStatusMessageParams: + connection_name: str + + +@dataclasses.dataclass +class CursorGetStatusMessageResult: + msg: Optional[str] + + +@Recorder.register_record_type +class CursorGetStatusMessageRecord(Record): + params_cls = CursorGetStatusMessageParams + result_cls = CursorGetStatusMessageResult + group = "Database" diff --git a/dbt/adapters/postgres/record/handle.py b/dbt/adapters/postgres/record/handle.py new file mode 100644 index 000000000..119dc2f13 --- /dev/null +++ b/dbt/adapters/postgres/record/handle.py @@ -0,0 +1,12 @@ +from dbt.adapters.record import RecordReplayHandle + +from dbt.adapters.postgres.record.cursor.cursor import PostgresRecordReplayCursor + + +class PostgresRecordReplayHandle(RecordReplayHandle): + """A custom extension of RecordReplayHandle that returns + a psycopg-specific PostgresRecordReplayCursor object.""" + + def cursor(self): + cursor = None if self.native_handle is None else self.native_handle.cursor() + return PostgresRecordReplayCursor(cursor, self.connection) From 66ba1c2a84c89dfc6e5f8777397daaa6bebae104 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 16 Jul 2024 18:28:51 -0400 Subject: [PATCH 085/114] Simplify hatch setup (#126) --- .github/workflows/integration-tests.yml | 2 +- .github/workflows/release_prep_hatch.yml | 10 +-- .github/workflows/unit-tests.yml | 2 +- CONTRIBUTING.md | 79 +++++++++++++++++++----- pyproject.toml | 42 +++++-------- 5 files changed, 85 insertions(+), 50 deletions(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index d73671908..e3390fbd6 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -91,7 +91,7 @@ jobs: python-version: ${{ matrix.python-version }} - name: Run integration tests - run: hatch run integration-tests:all + run: hatch run integration-tests env: POSTGRES_TEST_HOST: localhost POSTGRES_TEST_PORT: 5432 diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index 8e9ded04a..ab343b45b 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -31,7 +31,7 @@ # 1. Bump the version if it has not been bumped # 2. Generate the changelog (via changie) if there is no markdown file for this version name: "Release prep" -run-name: "Release prep: Generate changelog and bump ${{ inputs.package }} to ${{ inputs.version }} for release to ${{ inputs.deploy-to }}" +run-name: "Release prep: Generate changelog and bump to ${{ inputs.version }} for release to ${{ inputs.deploy-to }}" on: workflow_call: inputs: @@ -342,7 +342,7 @@ jobs: uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main - name: "Run unit tests" - run: hatch run unit-tests:all + run: hatch run unit-tests integration-tests: runs-on: ubuntu-latest @@ -387,7 +387,7 @@ jobs: uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main - name: "Run integration tests" - run: hatch run integration-tests:all + run: hatch run integration-tests env: POSTGRES_TEST_HOST: localhost POSTGRES_TEST_PORT: 5432 @@ -464,6 +464,6 @@ jobs: run: echo "sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT # if this is a real release and a release branch was created, delete it - - name: "Delete release branch: ${{ needs.branch.outputs.name }}" + - name: "Delete release branch: ${{ needs.release-branch.outputs.name }}" if: ${{ inputs.deploy-to == 'prod' && inputs.is-nightly-release == 'false' && needs.release-branch.outputs.name != '' }} - run: git push origin -d ${{ needs.branch.outputs.name }} + run: git push origin -d ${{ needs.release-branch.outputs.name }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 91d8c5783..e171ff787 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -37,5 +37,5 @@ jobs: python-version: ${{ matrix.python-version }} - name: Run unit tests - run: hatch run unit-tests:all + run: hatch run unit-tests shell: bash diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 427d2de52..9d925156f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -66,26 +66,42 @@ Rather than forking `dbt-labs/dbt-postgres`, use `dbt-labs/dbt-postgres` directl ### Installation -1. Ensure the latest version of `pip` is installed: +1. Ensure the latest versions of `pip` and `hatch` are installed: ```shell - pip install --upgrade pip + pip install --user --upgrade pip hatch ``` -2. Configure and activate a virtual environment using `virtualenv` as described in -[Setting up an environment](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md#setting-up-an-environment) -3. Install `dbt-postgres` and development dependencies in the virtual environment +2. This step is optional, but it's recommended. Configure `hatch` to create its virtual environments in the project. Add this block to your `hatch` `config.toml` file: + ```toml + # MacOS: ~/Library/Application Support/hatch/config.toml + [dirs.env] + virtual = ".hatch" + ``` + This makes `hatch` create all virtual environments in the project root inside of the directory `/.hatch`, similar to `/.tox` for `tox`. + It also makes it easier to add this environment as a runner in common IDEs like VSCode and PyCharm. +3. Create a `hatch` environment with all of the development dependencies and activate it: + ```shell + hatch run setup + hatch shell + ``` +4. Run any commands within the virtual environment by prefixing the command with `hatch run`: ```shell - pip install -e .[dev] + hatch run <command> ``` When `dbt-postgres` is installed this way, any changes made to the `dbt-postgres` source code will be reflected in the virtual environment immediately. - ## Testing -`dbt-postgres` contains [unit](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/unit) -and [functional](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/functional) tests. +`dbt-postgres` contains [code quality checks](https://github.com/dbt-labs/dbt-postgres/tree/main/.pre-commit-config.yaml), [unit tests](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/unit), +and [functional tests](https://github.com/dbt-labs/dbt-postgres/tree/main/tests/functional). + +### Code quality +Code quality checks can run with a single command: +```shell +hatch run code-quality +``` ### Unit tests @@ -94,10 +110,14 @@ Unit tests can be run locally without setting up a database connection: ```shell # Note: replace $strings with valid names +# run all unit tests +hatch run unit-test + # run all unit tests in a module -python -m pytest tests/unit/$test_file_name.py +hatch run unit-tests tests/unit/$test_file_name.py + # run a specific unit test -python -m pytest tests/unit/$test_file_name.py::$test_class_name::$test_method_name +hatch run unit-tests tests/unit/$test_file_name.py::$test_class_name::$test_method_name ``` ### Functional tests @@ -120,16 +140,45 @@ Functional tests can be run locally with a valid database connection configured ```shell # Note: replace $strings with valid names +# run all functional tests +hatch run integration-tests + # run all functional tests in a directory -python -m pytest tests/functional/$test_directory +hatch run integration-tests tests/functional/$test_directory + # run all functional tests in a module -python -m pytest tests/functional/$test_dir_and_filename.py +hatch run integration-tests tests/functional/$test_directory/$test_filename.py + # run all functional tests in a class -python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name +hatch run integration-tests tests/functional/$test_directory/$test_filename.py::$test_class_name + # run a specific functional test -python -m pytest tests/functional/$test_dir_and_filename.py::$test_class_name::$test__method_name +hatch run integration-tests tests/functional/$test_directory/$test_filename.py::$test_class_name::$test__method_name +``` + +### Testing against a development branch + +Some changes require a change in `dbt-common` and/or `dbt-adapters`. +In that case, the dependency on `dbt-common` and/or `dbt-adapters` must be updated to point to the development branch. For example: + +```toml +[tool.hatch.envs.default] +dependencies = [ + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git@my-dev-branch", + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git@my-dev-branch", + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git@my-dev-branch#subdirectory=dbt-tests-adapter", + ..., +] ``` +This will install `dbt-common`/`dbt-adapters`/`dbt-tests-adapter` as snapshots. In other words, if `my-dev-branch` is updated on GitHub, those updates will not be reflected locally. +In order to pick up those updates, the `hatch` environment(s) will need to be rebuilt: + +```shell +exit +hatch env prune +hatch shell +``` ## Documentation diff --git a/pyproject.toml b/pyproject.toml index 92fbf82a3..c6454835f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,6 @@ dependencies = [ "dbt-common>=0.1.0a1,<2.0", "agate>=1.0,<2.0", ] - [project.urls] Homepage = "https://github.com/dbt-labs/dbt-postgres" Documentation = "https://docs.getdbt.com" @@ -56,44 +55,31 @@ path = "dbt/adapters/postgres/__version__.py" dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", 'pre-commit==3.7.0;python_version>="3.9"', 'pre-commit==3.5.0;python_version=="3.8"', -] -[tool.hatch.envs.default.scripts] -dev = "pre-commit install" -code-quality = "pre-commit run --all-files" -docker-dev = [ - "echo Does not support integration testing, only development and unit testing. See issue https://github.com/dbt-labs/dbt-postgres/issues/99", - "docker build -f docker/dev.Dockerfile -t dbt-postgres-dev .", - "docker run --rm -it --name dbt-postgres-dev -v $(pwd):/opt/code dbt-postgres-dev", -] -docker-prod = "docker build -f docker/Dockerfile -t dbt-postgres ." - -[tool.hatch.envs.unit-tests] -dependencies = [ - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", - "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", "freezegun", "pytest", "pytest-dotenv", "pytest-mock", "pytest-xdist", ] -[tool.hatch.envs.unit-tests.scripts] -all = "python -m pytest {args:tests/unit}" - -[tool.hatch.envs.integration-tests] -template = "unit-tests" -extra-dependencies = [ - "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", -] -[tool.hatch.envs.integration-tests.env-vars] +[tool.hatch.envs.default.env-vars] DBT_TEST_USER_1 = "dbt_test_user_1" DBT_TEST_USER_2 = "dbt_test_user_2" DBT_TEST_USER_3 = "dbt_test_user_3" -[tool.hatch.envs.integration-tests.scripts] -all = "python -m pytest {args:tests/functional}" +[tool.hatch.envs.default.scripts] +setup = "pre-commit install" +code-quality = "pre-commit run --all-files" +unit-tests = "python -m pytest {args:tests/unit}" +integration-tests = "python -m pytest {args:tests/functional}" +docker-dev = [ + "echo Does not support integration testing, only development and unit testing. See issue https://github.com/dbt-labs/dbt-postgres/issues/99", + "docker build -f docker/dev.Dockerfile -t dbt-postgres-dev .", + "docker run --rm -it --name dbt-postgres-dev -v $(pwd):/opt/code dbt-postgres-dev", +] +docker-prod = "docker build -f docker/Dockerfile -t dbt-postgres ." [tool.hatch.envs.build] detached = true From 033de4cab23d42bcc4a8a350c1c636d962a2d3af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Jul 2024 21:31:07 +0000 Subject: [PATCH 086/114] Bump actions/checkout from 3 to 4 (#102) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> --- .github/workflows/release_prep_hatch.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index ab343b45b..0efe3e8d6 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -219,7 +219,7 @@ jobs: steps: - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-branch.outputs.name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.release-branch.outputs.name }} @@ -292,7 +292,7 @@ jobs: steps: - name: "Checkout ${{ github.event.repository.name }}@${{ needs.release-branch.outputs.name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ needs.release-branch.outputs.name }} @@ -410,7 +410,7 @@ jobs: steps: - name: "Checkout ${{ github.event.repository.name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: "Merge changes into ${{ inputs.branch }}" uses: everlytic/branch-merge@1.1.5 @@ -455,7 +455,7 @@ jobs: echo "name=$branch" >> $GITHUB_OUTPUT - name: "Checkout ${{ github.event.repository.name }}@${{ steps.branch.outputs.name }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ steps.branch.outputs.name }} From 6d99e2ba906db8665289a3035ed14750b9b5f90d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Jul 2024 21:46:42 +0000 Subject: [PATCH 087/114] Bump actions/setup-python from 4 to 5 (#118) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .github/workflows/integration-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index e3390fbd6..45fe8d4f2 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -113,7 +113,7 @@ jobs: uses: actions/checkout@v4 - name: "Set up Python ${{ matrix.python-version }}" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} From 1a04eb9f3a9545e8b242fe7ad4235b55146ec37c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Jul 2024 18:50:21 -0400 Subject: [PATCH 088/114] Bump dbt-labs/actions from 1.1.0 to 1.1.1 (#101) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Co-authored-by: Matthew McKnight <91097623+McKnight-42@users.noreply.github.com> --- .github/workflows/release_prep_hatch.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index 0efe3e8d6..a97268c63 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -113,7 +113,7 @@ jobs: - name: "Parse input version" id: semver - uses: dbt-labs/actions/parse-semver@v1.1.0 + uses: dbt-labs/actions/parse-semver@v1.1.1 with: version: ${{ inputs.version }} From 89d89d38e4f151e6f492fbe21bab43ab75c0c02c Mon Sep 17 00:00:00 2001 From: Jeremy Cohen <jtcohen6@gmail.com> Date: Thu, 18 Jul 2024 01:18:49 +0200 Subject: [PATCH 089/114] Bump deps on common, adapters, core (#95) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c6454835f..0fb7b885a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,11 +24,11 @@ classifiers = [ ] dependencies = [ "psycopg2-binary>=2.9,<3.0", - "dbt-adapters>=0.1.0a1,<2.0", + "dbt-adapters>=1.1.1,<2.0", # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency - "dbt-core>=1.8.0a1", + "dbt-core>=1.8.0", # installed via dbt-adapters but used directly - "dbt-common>=0.1.0a1,<2.0", + "dbt-common>=1.0.4,<2.0", "agate>=1.0,<2.0", ] [project.urls] From 68fe56aefda7053bfccda1c6aabdbd1b22a44638 Mon Sep 17 00:00:00 2001 From: Mila Page <67295367+VersusFacit@users.noreply.github.com> Date: Tue, 23 Jul 2024 15:24:37 -0700 Subject: [PATCH 090/114] Add hatch commands to the README (#131) Co-authored-by: Mila Page <versusfacit@users.noreply.github.com> --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index 285f5144d..8053af19b 100644 --- a/README.md +++ b/README.md @@ -35,6 +35,11 @@ fi This ensures the version of `psycopg2` will match that of `psycopg2-binary`. + +## Contribute + +See `CONTRIBUTING.md` for a detailed overview of contributing a code change to this adapter. + ## Join the dbt Community - Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/) From 28541730e62fbfa0e3f9c1f714e22ac08e25c651 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 24 Jul 2024 12:11:00 -0400 Subject: [PATCH 091/114] Make `dependabot` ignore patch updates (#128) Co-authored-by: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> --- .github/dependabot.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index ae2be43aa..746dcae22 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,13 +5,25 @@ updates: schedule: interval: "daily" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch - package-ecosystem: "docker" directory: "/docker" schedule: interval: "weekly" rebase-strategy: "disabled" + ignore: + - dependency-name: "*" + update-types: + - version-update:semver-patch From 550521ebaba99e4666f6b0908f429ec062fb6d8b Mon Sep 17 00:00:00 2001 From: Gerda Shank <gerda@dbtlabs.com> Date: Thu, 1 Aug 2024 14:59:06 -0400 Subject: [PATCH 092/114] Integration workflow update to support all-in-one adapter testing (#133) --- ...s.sh => update_dev_dependency_branches.sh} | 16 +++--- .github/workflows/integration-tests.yml | 49 +++++++++++++++++-- 2 files changed, 55 insertions(+), 10 deletions(-) rename .github/scripts/{update_dev_packages.sh => update_dev_dependency_branches.sh} (61%) diff --git a/.github/scripts/update_dev_packages.sh b/.github/scripts/update_dev_dependency_branches.sh similarity index 61% rename from .github/scripts/update_dev_packages.sh rename to .github/scripts/update_dev_dependency_branches.sh index c0f207b4e..cf652c318 100755 --- a/.github/scripts/update_dev_packages.sh +++ b/.github/scripts/update_dev_dependency_branches.sh @@ -2,16 +2,20 @@ set -e -adapters_git_branch=$1 -core_git_branch=$2 +dbt_adapters_branch=$1 +dbt_core_branch=$2 +dbt_common_branch=$3 target_req_file="pyproject.toml" -core_req_sed_pattern="s|dbt-core.git.*#subdirectory=core|dbt-core.git@${core_git_branch}#subdirectory=core|g" -adapters_req_sed_pattern="s|dbt-adapters.git|dbt-adapters.git@${adapters_git_branch}|g" +core_req_sed_pattern="s|dbt-core.git.*#subdirectory=core|dbt-core.git@${dbt_core_branch}#subdirectory=core|g" +adapters_req_sed_pattern="s|dbt-adapters.git|dbt-adapters.git@${dbt_adapters_branch}|g" +common_req_sed_pattern="s|dbt-common.git|dbt-common.git@${dbt_common_branch}|g" if [[ "$OSTYPE" == darwin* ]]; then # mac ships with a different version of sed that requires a delimiter arg - sed -i "" "$core_req_sed_pattern" $target_req_file sed -i "" "$adapters_req_sed_pattern" $target_req_file + sed -i "" "$core_req_sed_pattern" $target_req_file + sed -i "" "$common_req_sed_pattern" $target_req_file else - sed -i "$core_req_sed_pattern" $target_req_file sed -i "$adapters_req_sed_pattern" $target_req_file + sed -i "$core_req_sed_pattern" $target_req_file + sed -i "$common_req_sed_pattern" $target_req_file fi diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 45fe8d4f2..590f6f67f 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -1,5 +1,7 @@ name: Integration Tests +run-name: "${{ (contains(github.event_name, 'workflow_') && inputs.name) || github.event_name }}: ${{ (contains(github.event_name, 'workflow_') && inputs.adapter_branch) || github.ref_name }} by @${{ github.actor }}" + on: push: branches: @@ -8,28 +10,58 @@ on: pull_request: workflow_dispatch: inputs: + name: + description: "Name to associate with run (example: 'dbt-adapters-242')" + required: false + type: string + default: "Adapter Integration Tests" + adapter_branch: + description: "The branch of this adapter repository to use" + type: string + required: false + default: "main" dbt_adapters_branch: description: "The branch of dbt-adapters to use" type: string required: false default: "main" - core_branch: + dbt_core_branch: description: "The branch of dbt-core to use" type: string required: false default: "main" + dbt_common_branch: + description: "The branch of dbt-common to use" + type: string + required: false + default: "main" workflow_call: inputs: + name: + description: "name to associate with run" + required: false + type: string + default: "Adapter Integration Tests" + adapter_branch: + description: "The branch of this adapter repository to use" + type: string + required: false + default: "main" dbt_adapters_branch: description: "The branch of dbt-adapters to use" type: string required: false default: "main" - core_branch: + dbt_core_branch: description: "The branch of dbt-core to use" type: string required: false default: "main" + dbt_common_branch: + description: "The branch of dbt-common to use" + type: string + required: false + default: "main" permissions: read-all @@ -67,14 +99,23 @@ jobs: steps: - name: Check out repository + if: ${{ github.event_name == 'pull_request'|| github.event_name == 'push' }} uses: actions/checkout@v4 + - name: Check out the repository (workflow_dispatch) + if: ${{ github.event_name == 'workflow_dispatch' || github.event_name == 'workflow_call' }} + uses: actions/checkout@v4 + with: + ref: ${{ inputs.adapter_branch }} + - name: Update Adapters and Core branches if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch'}} run: | - ./.github/scripts/update_dev_packages.sh \ + ./.github/scripts/update_dev_dependency_branches.sh \ ${{ inputs.dbt_adapters_branch }} \ - ${{ inputs.core_branch }} + ${{ inputs.dbt_core_branch }} \ + ${{ inputs.dbt_common_branch }} + cat pyproject.toml - name: Setup postgres run: psql -f ./scripts/setup_test_database.sql From aa0fe479c848f15acb0563a602bea39a7ba2cc54 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 1 Aug 2024 15:10:53 -0400 Subject: [PATCH 093/114] Add support for python 3.12 (#135) Co-authored-by: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> --- .changes/unreleased/Features-20240731-210800.yaml | 6 ++++++ .github/workflows/integration-tests.yml | 4 ++-- .github/workflows/release.yml | 6 ++++++ .github/workflows/release_prep_hatch.yml | 2 +- .github/workflows/unit-tests.yml | 2 +- .pre-commit-config.yaml | 1 + docker/Dockerfile | 2 +- pyproject.toml | 1 + 8 files changed, 19 insertions(+), 5 deletions(-) create mode 100644 .changes/unreleased/Features-20240731-210800.yaml diff --git a/.changes/unreleased/Features-20240731-210800.yaml b/.changes/unreleased/Features-20240731-210800.yaml new file mode 100644 index 000000000..b2fc1f0ad --- /dev/null +++ b/.changes/unreleased/Features-20240731-210800.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for Python 3.12 +time: 2024-07-31T21:08:00.170999-04:00 +custom: + Author: mikealfare + Issue: "17" diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 590f6f67f..374908f46 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -82,7 +82,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] services: postgres: @@ -148,7 +148,7 @@ jobs: fail-fast: false matrix: platform: [ubuntu-22.04, macos-12] - python-version: ["3.8", "3.11"] + python-version: ["3.8", "3.12"] steps: - name: "Check out repository" uses: actions/checkout@v4 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1139380a8..bbf363993 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,6 +19,10 @@ on: description: "Only release Docker image, skip GitHub & PyPI" type: boolean default: false + python_version: + description: "Python version for building and testing the build" + type: string + default: "3.12" permissions: contents: write # this is the permission that allows creating a new release @@ -53,6 +57,8 @@ jobs: - name: "Setup `hatch`" uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main + with: + python-version: ${{ inputs.python_version }} - name: "Set archive name" id: archive diff --git a/.github/workflows/release_prep_hatch.yml b/.github/workflows/release_prep_hatch.yml index a97268c63..18d7637b8 100644 --- a/.github/workflows/release_prep_hatch.yml +++ b/.github/workflows/release_prep_hatch.yml @@ -77,7 +77,7 @@ defaults: shell: bash env: - PYTHON_TARGET_VERSION: 3.11 + PYTHON_TARGET_VERSION: 3.12 NOTIFICATION_PREFIX: "[Release Prep]" jobs: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index e171ff787..afb88136c 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - name: Check out repository diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c869fe86e..0bd01f7cc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,6 +27,7 @@ repos: - --target-version=py39 - --target-version=py310 - --target-version=py311 + - --target-version=py312 - repo: https://github.com/pycqa/flake8 rev: 7.0.0 diff --git a/docker/Dockerfile b/docker/Dockerfile index 7c8dc14ee..b6a87dfc6 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,5 +1,5 @@ # this image gets published to GHCR for production use -ARG py_version=3.11.2 +ARG py_version=3.12.4 FROM python:$py_version-slim-bullseye as base diff --git a/pyproject.toml b/pyproject.toml index 0fb7b885a..e6848a4f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ "psycopg2-binary>=2.9,<3.0", From 5575fa86d1e74989418d9dfebe79ecca207e134a Mon Sep 17 00:00:00 2001 From: leahwicz <60146280+leahwicz@users.noreply.github.com> Date: Tue, 6 Aug 2024 14:40:44 -0400 Subject: [PATCH 094/114] Updating changie.yaml to add contributors and PR links (#134) Co-authored-by: Mike Alfare <13974384+mikealfare@users.noreply.github.com> --- .../Under the Hood-20240731-075011.yaml | 6 + .changie.yaml | 117 ++++++++++++++++-- 2 files changed, 114 insertions(+), 9 deletions(-) create mode 100644 .changes/unreleased/Under the Hood-20240731-075011.yaml diff --git a/.changes/unreleased/Under the Hood-20240731-075011.yaml b/.changes/unreleased/Under the Hood-20240731-075011.yaml new file mode 100644 index 000000000..185c91484 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240731-075011.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Updating changie.yaml to add contributors and PR links +time: 2024-07-31T07:50:11.875044-04:00 +custom: + Author: leahwicz + Issue: "109" diff --git a/.changie.yaml b/.changie.yaml index 1efbf5848..c5c2f069f 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -1,20 +1,65 @@ changesDir: .changes unreleasedDir: unreleased headerPath: header.tpl.md +versionHeaderPath: "" changelogPath: CHANGELOG.md versionExt: md -envPrefix: CHANGIE_ +envPrefix: "CHANGIE_" versionFormat: '## dbt-postgres {{.Version}} - {{.Time.Format "January 02, 2006"}}' kindFormat: '### {{.Kind}}' -changeFormat: '* {{.Body}}' +changeFormat: |- + {{- $IssueList := list }} + {{- $changes := splitList " " $.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-postgres/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}}) + kinds: - - label: Breaking Changes - - label: Features - - label: Fixes - - label: Docs - - label: Under the Hood - - label: Dependencies - - label: Security +- label: Breaking Changes +- label: Features +- label: Fixes +- label: Under the Hood +- label: Dependencies + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-postgres/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 +- label: Security + changeFormat: |- + {{- $PRList := list }} + {{- $changes := splitList " " $.Custom.PR }} + {{- range $pullrequest := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-postgres/pull/nbr)" | replace "nbr" $pullrequest }} + {{- $PRList = append $PRList $changeLink }} + {{- end -}} + - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) + skipGlobalChoices: true + additionalChoices: + - key: Author + label: GitHub Username(s) (separated by a single space if multiple) + type: string + minLength: 3 + - key: PR + label: GitHub Pull Request Number (separated by a single space if multiple) + type: string + minLength: 1 + newlines: afterChangelogHeader: 1 afterKind: 1 @@ -31,3 +76,57 @@ custom: label: GitHub Issue Number (separated by a single space if multiple) type: string minLength: 1 + + +footerFormat: | + {{- $contributorDict := dict }} + {{- /* ensure all names in this list are all lowercase for later matching purposes */}} + {{- $core_team := splitList " " .Env.CORE_TEAM }} + {{- /* ensure we always skip snyk and dependabot in addition to the core team */}} + {{- $maintainers := list "dependabot[bot]" "snyk-bot"}} + {{- range $team_member := $core_team }} + {{- $team_member_lower := lower $team_member }} + {{- $maintainers = append $maintainers $team_member_lower }} + {{- end }} + {{- range $change := .Changes }} + {{- $authorList := splitList " " $change.Custom.Author }} + {{- /* loop through all authors for a single changelog */}} + {{- range $author := $authorList }} + {{- $authorLower := lower $author }} + {{- /* we only want to include non-core team contributors */}} + {{- if not (has $authorLower $maintainers)}} + {{- $changeList := splitList " " $change.Custom.Author }} + {{- $IssueList := list }} + {{- $changeLink := $change.Kind }} + {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }} + {{- $changes := splitList " " $change.Custom.PR }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-postgres/pull/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + {{- else }} + {{- $changes := splitList " " $change.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-postgres/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} + {{- end -}} + {{- end }} + {{- /* check if this contributor has other changes associated with them already */}} + {{- if hasKey $contributorDict $author }} + {{- $contributionList := get $contributorDict $author }} + {{- $contributionList = concat $contributionList $IssueList }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- else }} + {{- $contributionList := $IssueList }} + {{- $contributorDict := set $contributorDict $author $contributionList }} + {{- end }} + {{- end}} + {{- end}} + {{- end }} + {{- /* no indentation here for formatting so the final markdown doesn't have unneeded indentations */}} + {{- if $contributorDict}} + ### Contributors + {{- range $k,$v := $contributorDict }} + - [@{{$k}}](https://github.com/{{$k}}) ({{ range $index, $element := $v }}{{if $index}}, {{end}}{{$element}}{{end}}) + {{- end }} + {{- end }} From 6e4a8e351402b31e48ccb2d5ae5b6f014f076230 Mon Sep 17 00:00:00 2001 From: Michelle Ark <MichelleArk@users.noreply.github.com> Date: Mon, 9 Sep 2024 17:11:45 -0400 Subject: [PATCH 095/114] remove tests covered by dbt-core (#147) --- tests/functional/deprecations/fixtures.py | 101 ---- .../deprecations/model_deprecations.py | 106 ---- .../deprecations/test_config_deprecations.py | 148 ------ .../deprecations/test_deprecations.py | 148 ------ tests/functional/minimal_cli/fixtures.py | 111 ---- .../minimal_cli/test_minimal_cli.py | 52 -- tests/functional/test_dbt_runner.py | 73 --- tests/functional/test_init.py | 493 ------------------ 8 files changed, 1232 deletions(-) delete mode 100644 tests/functional/deprecations/fixtures.py delete mode 100644 tests/functional/deprecations/model_deprecations.py delete mode 100644 tests/functional/deprecations/test_config_deprecations.py delete mode 100644 tests/functional/deprecations/test_deprecations.py delete mode 100644 tests/functional/minimal_cli/fixtures.py delete mode 100644 tests/functional/minimal_cli/test_minimal_cli.py delete mode 100644 tests/functional/test_dbt_runner.py delete mode 100644 tests/functional/test_init.py diff --git a/tests/functional/deprecations/fixtures.py b/tests/functional/deprecations/fixtures.py deleted file mode 100644 index 0028f206e..000000000 --- a/tests/functional/deprecations/fixtures.py +++ /dev/null @@ -1,101 +0,0 @@ -models__already_exists_sql = """ -select 1 as id - -{% if adapter.already_exists(this.schema, this.identifier) and not should_full_refresh() %} - where id > (select max(id) from {{this}}) -{% endif %} -""" - -models_trivial__model_sql = """ -select 1 as id -""" - - -bad_name_yaml = """ -version: 2 - -exposures: - - name: simple exposure spaced!! - type: dashboard - depends_on: - - ref('model') - owner: - email: something@example.com -""" - -# deprecated test config fixtures -data_tests_yaml = """ -models: - - name: model - columns: - - name: id - data_tests: - - not_null -""" - -test_type_mixed_yaml = """ -models: - - name: model - columns: - - name: id - data_tests: - - not_null - tests: - - unique -""" - -old_tests_yaml = """ -models: - - name: model - columns: - - name: id - tests: - - not_null -""" - -sources_old_tests_yaml = """ -sources: - - name: seed_source - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: "seed" - columns: - - name: id - tests: - - unique -""" - -seed_csv = """id,name -1,Mary -2,Sam -3,John -""" - - -local_dependency__dbt_project_yml = """ - -name: 'local_dep' -version: '1.0' - -seeds: - quote_columns: False - -""" - -local_dependency__schema_yml = """ -sources: - - name: seed_source - schema: "{{ var('schema_override', target.schema) }}" - tables: - - name: "seed" - columns: - - name: id - tests: - - unique -""" - -local_dependency__seed_csv = """id,name -1,Mary -2,Sam -3,John -""" diff --git a/tests/functional/deprecations/model_deprecations.py b/tests/functional/deprecations/model_deprecations.py deleted file mode 100644 index c762e7a65..000000000 --- a/tests/functional/deprecations/model_deprecations.py +++ /dev/null @@ -1,106 +0,0 @@ -from dbt.cli.main import dbtRunner -from dbt.tests.util import run_dbt -from dbt_common.exceptions import EventCompilationError -import pytest - - -deprecated_model__yml = """ -version: 2 - -models: - - name: my_model - description: deprecated - deprecation_date: 1999-01-01 -""" - -deprecating_model__yml = """ -version: 2 - -models: - - name: my_model - description: deprecating in the future - deprecation_date: 2999-01-01 -""" - -model__sql = """ -select 1 as Id -""" - -dependant_model__sql = """ -select * from {{ ref("my_model") }} -""" - - -class TestModelDeprecationWarning: - @pytest.fixture(scope="class") - def models(self): - return {"my_model.sql": model__sql, "my_schema.yml": deprecated_model__yml} - - def test_deprecation_warning(self, project): - events = [] - dbtRunner(callbacks=[events.append]).invoke(["parse"]) - matches = list([e for e in events if e.info.name == "DeprecatedModel"]) - assert len(matches) == 1 - assert matches[0].read_data.model_name == "my_model" - - def test_deprecation_warning_error(self, project): - with pytest.raises(EventCompilationError): - run_dbt(["--warn-error", "parse"]) - - def test_deprecation_warning_error_options(self, project): - with pytest.raises(EventCompilationError): - run_dbt(["--warn-error-options", '{"include": ["DeprecatedModel"]}', "parse"]) - - -class TestUpcomingReferenceDeprecatingWarning: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": model__sql, - "my_dependant_model.sql": dependant_model__sql, - "my_schema.yml": deprecating_model__yml, - } - - def test_deprecation_warning(self, project): - events = [] - dbtRunner(callbacks=[events.append]).invoke(["parse"]) - matches = list([e for e in events if e.info.name == "UpcomingReferenceDeprecation"]) - assert len(matches) == 1 - assert matches[0].read_data.model_name == "my_dependant_model" - assert matches[0].read_data.ref_model_name == "my_model" - - def test_deprecation_warning_error(self, project): - with pytest.raises(EventCompilationError): - run_dbt(["--warn-error", "parse"]) - - def test_deprecation_warning_error_options(self, project): - with pytest.raises(EventCompilationError): - run_dbt( - ["--warn-error-options", '{"include": ["UpcomingReferenceDeprecation"]}', "parse"] - ) - - -class TestDeprecatedReferenceWarning: - @pytest.fixture(scope="class") - def models(self): - return { - "my_model.sql": model__sql, - "my_dependant_model.sql": dependant_model__sql, - "my_schema.yml": deprecated_model__yml, - } - - def test_deprecation_warning(self, project): - events = [] - dbtRunner(callbacks=[events.append]).invoke(["parse"]) - matches = list([e for e in events if e.info.name == "DeprecatedReference"]) - assert len(matches) == 1 - assert matches[0].read_data.model_name == "my_dependant_model" - assert matches[0].read_data.ref_model_name == "my_model" - - def test_deprecation_warning_error(self, project): - with pytest.raises(EventCompilationError): - run_dbt(["--warn-error", "parse"]) - - def test_deprecation_warning_error_options(self, project): - with pytest.raises(EventCompilationError): - run_dbt(["--warn-error-options", '{"include": ["DeprecatedReference"]}', "parse"]) diff --git a/tests/functional/deprecations/test_config_deprecations.py b/tests/functional/deprecations/test_config_deprecations.py deleted file mode 100644 index f8623c1ae..000000000 --- a/tests/functional/deprecations/test_config_deprecations.py +++ /dev/null @@ -1,148 +0,0 @@ -from dbt.deprecations import active_deprecations, reset_deprecations -from dbt.exceptions import ProjectContractError, YamlParseDictError -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import run_dbt, update_config_file -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.deprecations import fixtures - - -# test deprecation messages -class TestTestsConfigDeprecation: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": fixtures.models_trivial__model_sql} - - @pytest.fixture(scope="class") - def project_config_update(self, unique_schema): - return {"tests": {"enabled": "true"}} - - def test_tests_config(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["parse"]) - expected = {"project-test-config"} - assert expected == active_deprecations - - def test_tests_config_fail(self, project): - reset_deprecations() - assert active_deprecations == set() - with pytest.raises(CompilationError) as exc: - run_dbt(["--warn-error", "--no-partial-parse", "parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "The `tests` config has been renamed to `data_tests`" - assert expected_msg in exc_str - - -class TestSchemaTestDeprecation: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": fixtures.models_trivial__model_sql, - "schema.yml": fixtures.old_tests_yaml, - } - - def test_tests_config(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["parse"]) - expected = {"project-test-config"} - assert expected == active_deprecations - - def test_schema_tests_fail(self, project): - reset_deprecations() - assert active_deprecations == set() - with pytest.raises(CompilationError) as exc: - run_dbt(["--warn-error", "--no-partial-parse", "parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "The `tests` config has been renamed to `data_tests`" - assert expected_msg in exc_str - - -class TestSourceSchemaTestDeprecation: - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": fixtures.sources_old_tests_yaml} - - @pytest.fixture(scope="class") - def seeds(self): - return {"seed.csv": fixtures.seed_csv} - - def test_source_tests_config(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["seed"]) - run_dbt(["parse"]) - expected = {"project-test-config"} - assert expected == active_deprecations - - def test_schema_tests(self, project): - run_dbt(["seed"]) - results = run_dbt(["test"]) - assert len(results) == 1 - - -# test for failure with test and data_tests in the same file -class TestBothSchemaTestDeprecation: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": fixtures.models_trivial__model_sql, - "schema.yml": fixtures.test_type_mixed_yaml, - } - - def test_schema(self, project): - expected_msg = "Invalid test config: cannot have both 'tests' and 'data_tests' defined" - with pytest.raises(YamlParseDictError) as excinfo: - run_dbt(["parse"]) - assert expected_msg in str(excinfo.value) - - -# test for failure with test and data_tests in the same dbt_project.yml -class TestBothProjectTestDeprecation: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": fixtures.models_trivial__model_sql} - - def test_tests_config(self, project): - config_patch = {"tests": {"+enabled": "true"}, "data_tests": {"+tags": "super"}} - update_config_file(config_patch, project.project_root, "dbt_project.yml") - - expected_msg = "Invalid project config: cannot have both 'tests' and 'data_tests' defined" - with pytest.raises(ProjectContractError) as excinfo: - run_dbt(["parse"]) - assert expected_msg in str(excinfo.value) - - -# test a local dependency can have tests while the rest of the project uses data_tests -class TestTestConfigInDependency: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_dependency_files = { - "dbt_project.yml": fixtures.local_dependency__dbt_project_yml, - "models": { - "schema.yml": fixtures.local_dependency__schema_yml, - }, - "seeds": {"seed.csv": fixtures.local_dependency__seed_csv}, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_dependency"}]} - - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": fixtures.models_trivial__model_sql, - "schema.yml": fixtures.data_tests_yaml, - } - - def test_test_dep(self, project): - run_dbt(["deps"]) - run_dbt(["seed"]) - run_dbt(["run"]) - results = run_dbt(["test"]) - # 1 data_test in the dep and 1 in the project - assert len(results) == 2 diff --git a/tests/functional/deprecations/test_deprecations.py b/tests/functional/deprecations/test_deprecations.py deleted file mode 100644 index 1f4a31c28..000000000 --- a/tests/functional/deprecations/test_deprecations.py +++ /dev/null @@ -1,148 +0,0 @@ -from dbt.deprecations import active_deprecations, reset_deprecations -from dbt.tests.util import run_dbt, write_file -from dbt_common.exceptions import CompilationError -import pytest -import yaml - -from tests.functional.deprecations import fixtures - - -class TestConfigPathDeprecation: - @pytest.fixture(scope="class") - def models(self): - return {"already_exists.sql": fixtures.models_trivial__model_sql} - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "data-paths": ["data"], - "log-path": "customlogs", - "target-path": "customtarget", - } - - def test_data_path(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["debug"]) - expected = { - "project-config-data-paths", - "project-config-log-path", - "project-config-target-path", - } - assert expected == active_deprecations - - def test_data_path_fail(self, project): - reset_deprecations() - assert active_deprecations == set() - with pytest.raises(CompilationError) as exc: - run_dbt(["--warn-error", "debug"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "The `data-paths` config has been renamed" - assert expected_msg in exc_str - - -class TestPackageInstallPathDeprecation: - @pytest.fixture(scope="class") - def models_trivial(self): - return {"model.sql": fixtures.models_trivial__model_sql} - - @pytest.fixture(scope="class") - def project_config_update(self): - return {"config-version": 2, "clean-targets": ["dbt_modules"]} - - def test_package_path(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["clean"]) - expected = {"install-packages-path"} - assert expected == active_deprecations - - def test_package_path_not_set(self, project): - reset_deprecations() - assert active_deprecations == set() - with pytest.raises(CompilationError) as exc: - run_dbt(["--warn-error", "clean"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "path has changed from `dbt_modules` to `dbt_packages`." - assert expected_msg in exc_str - - -class TestPackageRedirectDeprecation: - @pytest.fixture(scope="class") - def models(self): - return {"already_exists.sql": fixtures.models_trivial__model_sql} - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"package": "fishtown-analytics/dbt_utils", "version": "0.7.0"}]} - - def test_package_redirect(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["deps"]) - expected = {"package-redirect"} - assert expected == active_deprecations - - # if this test comes before test_package_redirect it will raise an exception as expected - def test_package_redirect_fail(self, project): - reset_deprecations() - assert active_deprecations == set() - with pytest.raises(CompilationError) as exc: - run_dbt(["--warn-error", "deps"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "The `fishtown-analytics/dbt_utils` package is deprecated in favor of `dbt-labs/dbt_utils`" - assert expected_msg in exc_str - - -class TestExposureNameDeprecation: - @pytest.fixture(scope="class") - def models(self): - return { - "model.sql": fixtures.models_trivial__model_sql, - "bad_name.yml": fixtures.bad_name_yaml, - } - - def test_exposure_name(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["parse"]) - expected = {"exposure-name"} - assert expected == active_deprecations - - def test_exposure_name_fail(self, project): - reset_deprecations() - assert active_deprecations == set() - with pytest.raises(CompilationError) as exc: - run_dbt(["--warn-error", "--no-partial-parse", "parse"]) - exc_str = " ".join(str(exc.value).split()) # flatten all whitespace - expected_msg = "Starting in v1.3, the 'name' of an exposure should contain only letters, numbers, and underscores." - assert expected_msg in exc_str - - -class TestPrjectFlagsMovedDeprecation: - @pytest.fixture(scope="class") - def profiles_config_update(self): - return { - "config": {"send_anonymous_usage_stats": False}, - } - - @pytest.fixture(scope="class") - def dbt_project_yml(self, project_root, project_config_update): - project_config = { - "name": "test", - "profile": "test", - } - write_file(yaml.safe_dump(project_config), project_root, "dbt_project.yml") - return project_config - - @pytest.fixture(scope="class") - def models(self): - return {"my_model.sql": "select 1 as fun"} - - def test_profile_config_deprecation(self, project): - reset_deprecations() - assert active_deprecations == set() - run_dbt(["parse"]) - expected = {"project-flags-moved"} - assert expected == active_deprecations diff --git a/tests/functional/minimal_cli/fixtures.py b/tests/functional/minimal_cli/fixtures.py deleted file mode 100644 index dadfb130f..000000000 --- a/tests/functional/minimal_cli/fixtures.py +++ /dev/null @@ -1,111 +0,0 @@ -import pytest -from click.testing import CliRunner - -models__schema_yml = """ -version: 2 -models: - - name: sample_model - columns: - - name: sample_num - data_tests: - - accepted_values: - values: [1, 2] - - not_null - - name: sample_bool - data_tests: - - not_null - - unique -""" - -models__sample_model = """ -select * from {{ ref('sample_seed') }} -""" - -snapshots__sample_snapshot = """ -{% snapshot orders_snapshot %} - -{{ - config( - target_database='dbt', - target_schema='snapshots', - unique_key='sample_num', - strategy='timestamp', - updated_at='updated_at', - ) -}} - -select * from {{ ref('sample_model') }} - -{% endsnapshot %} -""" - -seeds__sample_seed = """sample_num,sample_bool -1,true -2,false -,true -""" - -tests__failing_sql = """ -{{ config(severity = 'warn') }} -select 1 -""" - - -class BaseConfigProject: - @pytest.fixture() - def runner(self): - return CliRunner() - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "name": "jaffle_shop", - "profile": "jaffle_shop", - "version": "0.1.0", - "config-version": 2, - "clean-targets": ["target", "dbt_packages", "logs"], - } - - @pytest.fixture(scope="class") - def profiles_config_update(self): - return { - "jaffle_shop": { - "outputs": { - "dev": { - "type": "postgres", - "dbname": "dbt", - "schema": "jaffle_shop", - "host": "localhost", - "user": "root", - "port": 5432, - "pass": "password", - } - }, - "target": "dev", - } - } - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"package": "dbt-labs/dbt_utils", "version": "1.0.0"}]} - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models__schema_yml, - "sample_model.sql": models__sample_model, - } - - @pytest.fixture(scope="class") - def snapshots(self): - return {"sample_snapshot.sql": snapshots__sample_snapshot} - - @pytest.fixture(scope="class") - def seeds(self): - return {"sample_seed.csv": seeds__sample_seed} - - @pytest.fixture(scope="class") - def tests(self): - return { - "failing.sql": tests__failing_sql, - } diff --git a/tests/functional/minimal_cli/test_minimal_cli.py b/tests/functional/minimal_cli/test_minimal_cli.py deleted file mode 100644 index 1fccbbd07..000000000 --- a/tests/functional/minimal_cli/test_minimal_cli.py +++ /dev/null @@ -1,52 +0,0 @@ -from dbt.cli.main import cli - -from tests.functional.minimal_cli.fixtures import BaseConfigProject -from tests.functional.utils import up_one - - -class TestClean(BaseConfigProject): - """Test the minimal/happy-path for the CLI using the Click CliRunner""" - - def test_clean(self, runner, project): - result = runner.invoke(cli, ["clean"]) - assert "target" in result.output - assert "dbt_packages" in result.output - assert "logs" in result.output - - -class TestCleanUpLevel(BaseConfigProject): - def test_clean_one_level_up(self, runner, project): - with up_one(): - result = runner.invoke(cli, ["clean"]) - assert result.exit_code == 2 - assert "Runtime Error" in result.output - assert "No dbt_project.yml" in result.output - - -class TestDeps(BaseConfigProject): - def test_deps(self, runner, project): - result = runner.invoke(cli, ["deps"]) - assert "dbt-labs/dbt_utils" in result.output - assert "1.0.0" in result.output - - -class TestBuild(BaseConfigProject): - def test_build(self, runner, project): - runner.invoke(cli, ["deps"]) - result = runner.invoke(cli, ["build"]) - # 1 seed, 1 model, 2 data tests - assert "PASS=4" in result.output - # 2 data tests - assert "ERROR=2" in result.output - # Singular test - assert "WARN=1" in result.output - # 1 snapshot - assert "SKIP=1" in result.output - - -class TestDocsGenerate(BaseConfigProject): - def test_docs_generate(self, runner, project): - runner.invoke(cli, ["deps"]) - result = runner.invoke(cli, ["docs", "generate"]) - assert "Building catalog" in result.output - assert "Catalog written" in result.output diff --git a/tests/functional/test_dbt_runner.py b/tests/functional/test_dbt_runner.py deleted file mode 100644 index d3db2d20b..000000000 --- a/tests/functional/test_dbt_runner.py +++ /dev/null @@ -1,73 +0,0 @@ -from unittest import mock - -from dbt.cli.exceptions import DbtUsageException -from dbt.cli.main import dbtRunner -from dbt.exceptions import DbtProjectError -import pytest - - -class TestDbtRunner: - @pytest.fixture - def dbt(self) -> dbtRunner: - return dbtRunner() - - def test_group_invalid_option(self, dbt: dbtRunner) -> None: - res = dbt.invoke(["--invalid-option"]) - assert isinstance(res.exception, DbtUsageException) - - def test_command_invalid_option(self, dbt: dbtRunner) -> None: - res = dbt.invoke(["deps", "--invalid-option"]) - assert isinstance(res.exception, DbtUsageException) - - def test_command_mutually_exclusive_option(self, dbt: dbtRunner) -> None: - res = dbt.invoke(["--warn-error", "--warn-error-options", '{"include": "all"}', "deps"]) - assert isinstance(res.exception, DbtUsageException) - res = dbt.invoke(["deps", "--warn-error", "--warn-error-options", '{"include": "all"}']) - assert isinstance(res.exception, DbtUsageException) - - def test_invalid_command(self, dbt: dbtRunner) -> None: - res = dbt.invoke(["invalid-command"]) - assert isinstance(res.exception, DbtUsageException) - - def test_invoke_version(self, dbt: dbtRunner) -> None: - dbt.invoke(["--version"]) - - def test_callbacks(self) -> None: - mock_callback = mock.MagicMock() - dbt = dbtRunner(callbacks=[mock_callback]) - # the `debug` command is one of the few commands wherein you don't need - # to have a project to run it and it will emit events - dbt.invoke(["debug"]) - mock_callback.assert_called() - - def test_invoke_kwargs(self, project, dbt): - res = dbt.invoke( - ["run"], - log_format="json", - log_path="some_random_path", - version_check=False, - profile_name="some_random_profile_name", - target_dir="some_random_target_dir", - ) - assert res.result.args["log_format"] == "json" - assert res.result.args["log_path"] == "some_random_path" - assert res.result.args["version_check"] is False - assert res.result.args["profile_name"] == "some_random_profile_name" - assert res.result.args["target_dir"] == "some_random_target_dir" - - def test_invoke_kwargs_project_dir(self, project, dbt): - res = dbt.invoke(["run"], project_dir="some_random_project_dir") - assert isinstance(res.exception, DbtProjectError) - - msg = "No dbt_project.yml found at expected path some_random_project_dir" - assert msg in res.exception.msg - - def test_invoke_kwargs_profiles_dir(self, project, dbt): - res = dbt.invoke(["run"], profiles_dir="some_random_profiles_dir") - assert isinstance(res.exception, DbtProjectError) - msg = "Could not find profile named 'test'" - assert msg in res.exception.msg - - def test_invoke_kwargs_and_flags(self, project, dbt): - res = dbt.invoke(["--log-format=text", "run"], log_format="json") - assert res.result.args["log_format"] == "json" diff --git a/tests/functional/test_init.py b/tests/functional/test_init.py deleted file mode 100644 index 1c8202b74..000000000 --- a/tests/functional/test_init.py +++ /dev/null @@ -1,493 +0,0 @@ -import os -from pathlib import Path -from unittest.mock import Mock, call, patch - -import click -from dbt_common.exceptions import DbtRuntimeError -from dbt.tests.util import run_dbt -import pytest - - -class TestInitProjectWithExistingProfilesYml: - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_task_in_project_with_existing_profiles_yml( - self, mock_prompt, mock_confirm, mock_get_adapter, project - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - manager.confirm.side_effect = ["y"] - manager.prompt.side_effect = [ - 1, - "localhost", - 5432, - "test_user", - "test_password", - "test_db", - "test_schema", - 4, - ] - mock_get_adapter.return_value = [project.adapter.type()] - - run_dbt(["init"]) - - manager.assert_has_calls( - [ - call.confirm( - f"The profile test already exists in {os.path.join(project.profiles_dir, 'profiles.yml')}. Continue and overwrite it?" - ), - call.prompt( - "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", - type=click.INT, - ), - call.prompt( - "host (hostname for the instance)", default=None, hide_input=False, type=None - ), - call.prompt("port", default=5432, hide_input=False, type=click.INT), - call.prompt("user (dev username)", default=None, hide_input=False, type=None), - call.prompt("pass (dev password)", default=None, hide_input=True, type=None), - call.prompt( - "dbname (default database that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt( - "schema (default schema that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), - ] - ) - - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert ( - f.read() - == """test: - outputs: - dev: - dbname: test_db - host: localhost - pass: test_password - port: 5432 - schema: test_schema - threads: 4 - type: postgres - user: test_user - target: dev -""" - ) - - def test_init_task_in_project_specifying_profile_errors(self): - with pytest.raises(DbtRuntimeError) as error: - run_dbt(["init", "--profile", "test"], expect_pass=False) - assert "Can not init existing project with specified profile" in str(error) - - -class TestInitProjectWithoutExistingProfilesYml: - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.prompt") - @patch.object(Path, "exists", autospec=True) - def test_init_task_in_project_without_existing_profiles_yml( - self, exists, mock_prompt, mock_get_adapter, project - ): - def exists_side_effect(path): - # Override responses on specific files, default to 'real world' if not overriden - return {"profiles.yml": False}.get(path.name, os.path.exists(path)) - - exists.side_effect = exists_side_effect - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.prompt.side_effect = [ - 1, - "localhost", - 5432, - "test_user", - "test_password", - "test_db", - "test_schema", - 4, - ] - mock_get_adapter.return_value = [project.adapter.type()] - - run_dbt(["init"]) - - manager.assert_has_calls( - [ - call.prompt( - "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", - type=click.INT, - ), - call.prompt( - "host (hostname for the instance)", default=None, hide_input=False, type=None - ), - call.prompt("port", default=5432, hide_input=False, type=click.INT), - call.prompt("user (dev username)", default=None, hide_input=False, type=None), - call.prompt("pass (dev password)", default=None, hide_input=True, type=None), - call.prompt( - "dbname (default database that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt( - "schema (default schema that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), - ] - ) - - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert ( - f.read() - == """test: - outputs: - dev: - dbname: test_db - host: localhost - pass: test_password - port: 5432 - schema: test_schema - threads: 4 - type: postgres - user: test_user - target: dev -""" - ) - - @patch.object(Path, "exists", autospec=True) - def test_init_task_in_project_without_profile_yml_specifying_profile_errors(self, exists): - def exists_side_effect(path): - # Override responses on specific files, default to 'real world' if not overriden - return {"profiles.yml": False}.get(path.name, os.path.exists(path)) - - exists.side_effect = exists_side_effect - - # Even through no profiles.yml file exists, the init will not modify project.yml, - # so this errors - with pytest.raises(DbtRuntimeError) as error: - run_dbt(["init", "--profile", "test"], expect_pass=False) - assert "Could not find profile named test" in str(error) - - -class TestInitProjectWithoutExistingProfilesYmlOrTemplate: - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - @patch.object(Path, "exists", autospec=True) - def test_init_task_in_project_without_existing_profiles_yml_or_profile_template( - self, exists, mock_prompt, mock_confirm, mock_get_adapter, project - ): - def exists_side_effect(path): - # Override responses on specific files, default to 'real world' if not overriden - return { - "profiles.yml": False, - "profile_template.yml": False, - }.get(path.name, os.path.exists(path)) - - exists.side_effect = exists_side_effect - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - manager.prompt.side_effect = [ - 1, - ] - mock_get_adapter.return_value = [project.adapter.type()] - run_dbt(["init"]) - manager.assert_has_calls( - [ - call.prompt( - "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", - type=click.INT, - ), - ] - ) - - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert ( - f.read() - == """test: - outputs: - - dev: - type: postgres - threads: [1 or more] - host: [host] - port: [port] - user: [dev_username] - pass: [dev_password] - dbname: [dbname] - schema: [dev_schema] - - prod: - type: postgres - threads: [1 or more] - host: [host] - port: [port] - user: [prod_username] - pass: [prod_password] - dbname: [dbname] - schema: [prod_schema] - - target: dev -""" - ) - - -class TestInitProjectWithProfileTemplateWithoutExistingProfilesYml: - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - @patch.object(Path, "exists", autospec=True) - def test_init_task_in_project_with_profile_template_without_existing_profiles_yml( - self, exists, mock_prompt, mock_confirm, mock_get_adapter, project - ): - def exists_side_effect(path): - # Override responses on specific files, default to 'real world' if not overriden - return { - "profiles.yml": False, - }.get(path.name, os.path.exists(path)) - - exists.side_effect = exists_side_effect - - with open("profile_template.yml", "w") as f: - f.write( - """fixed: - type: postgres - threads: 4 - host: localhost - dbname: my_db - schema: my_schema - target: my_target -prompts: - target: - hint: 'The target name' - type: string - port: - hint: 'The port (for integer test purposes)' - type: int - default: 5432 - user: - hint: 'Your username' - pass: - hint: 'Your password' - hide_input: true""" - ) - - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - manager.prompt.side_effect = ["my_target", 5432, "test_username", "test_password"] - mock_get_adapter.return_value = [project.adapter.type()] - run_dbt(["init"]) - manager.assert_has_calls( - [ - call.prompt( - "target (The target name)", default=None, hide_input=False, type=click.STRING - ), - call.prompt( - "port (The port (for integer test purposes))", - default=5432, - hide_input=False, - type=click.INT, - ), - call.prompt("user (Your username)", default=None, hide_input=False, type=None), - call.prompt("pass (Your password)", default=None, hide_input=True, type=None), - ] - ) - - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert ( - f.read() - == """test: - outputs: - my_target: - dbname: my_db - host: localhost - pass: test_password - port: 5432 - schema: my_schema - threads: 4 - type: postgres - user: test_username - target: my_target -""" - ) - - -class TestInitInvalidProfileTemplate: - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_task_in_project_with_invalid_profile_template( - self, mock_prompt, mock_confirm, mock_get_adapter, project - ): - """Test that when an invalid profile_template.yml is provided in the project, - init command falls back to the target's profile_template.yml""" - with open(os.path.join(project.project_root, "profile_template.yml"), "w") as f: - f.write("""invalid template""") - - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - manager.confirm.side_effect = ["y"] - manager.prompt.side_effect = [ - 1, - "localhost", - 5432, - "test_username", - "test_password", - "test_db", - "test_schema", - 4, - ] - mock_get_adapter.return_value = [project.adapter.type()] - - run_dbt(["init"]) - - manager.assert_has_calls( - [ - call.confirm( - f"The profile test already exists in {os.path.join(project.profiles_dir, 'profiles.yml')}. Continue and overwrite it?" - ), - call.prompt( - "Which database would you like to use?\n[1] postgres\n\n(Don't see the one you want? https://docs.getdbt.com/docs/available-adapters)\n\nEnter a number", - type=click.INT, - ), - call.prompt( - "host (hostname for the instance)", default=None, hide_input=False, type=None - ), - call.prompt("port", default=5432, hide_input=False, type=click.INT), - call.prompt("user (dev username)", default=None, hide_input=False, type=None), - call.prompt("pass (dev password)", default=None, hide_input=True, type=None), - call.prompt( - "dbname (default database that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt( - "schema (default schema that dbt will build objects in)", - default=None, - hide_input=False, - type=None, - ), - call.prompt("threads (1 or more)", default=1, hide_input=False, type=click.INT), - ] - ) - - with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: - assert ( - f.read() - == """test: - outputs: - dev: - dbname: test_db - host: localhost - pass: test_password - port: 5432 - schema: test_schema - threads: 4 - type: postgres - user: test_username - target: dev -""" - ) - - -class TestInitInsideOfProjectBase: - @pytest.fixture(scope="class") - def project_name(self, unique_schema): - return f"my_project_{unique_schema}" - - -class TestInitOutsideOfProjectBase: - @pytest.fixture(scope="class") - def project_name(self, unique_schema): - return f"my_project_{unique_schema}" - - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - # Start by removing the dbt_project.yml so that we're not in an existing project - os.remove(os.path.join(project.project_root, "dbt_project.yml")) - - -class TestInitInsideProjectAndSkipProfileSetup(TestInitInsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.confirm") - @patch("click.prompt") - def test_init_inside_project_and_skip_profile_setup( - self, mock_prompt, mock_confirm, mock_get, project, project_name - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.attach_mock(mock_confirm, "confirm") - - assert Path("dbt_project.yml").exists() - - # skip interactive profile setup - run_dbt(["init", "--skip-profile-setup"]) - assert len(manager.mock_calls) == 0 - - -class TestInitOutsideOfProjectSpecifyingInvalidProfile(TestInitOutsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.prompt") - def test_init_task_outside_project_specifying_invalid_profile_errors( - self, mock_prompt, mock_get_adapter, project, project_name - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.prompt.side_effect = [ - project_name, - ] - mock_get_adapter.return_value = [project.adapter.type()] - - with pytest.raises(DbtRuntimeError) as error: - run_dbt(["init", "--profile", "invalid"], expect_pass=False) - assert "Could not find profile named invalid" in str(error) - - manager.assert_has_calls( - [ - call.prompt("Enter a name for your project (letters, digits, underscore)"), - ] - ) - - -class TestInitOutsideOfProjectSpecifyingProfileNoProfilesYml(TestInitOutsideOfProjectBase): - @patch("dbt.task.init._get_adapter_plugin_names") - @patch("click.prompt") - def test_init_task_outside_project_specifying_profile_no_profiles_yml_errors( - self, mock_prompt, mock_get_adapter, project, project_name - ): - manager = Mock() - manager.attach_mock(mock_prompt, "prompt") - manager.prompt.side_effect = [ - project_name, - ] - mock_get_adapter.return_value = [project.adapter.type()] - - # Override responses on specific files, default to 'real world' if not overriden - original_isfile = os.path.isfile - with patch( - "os.path.isfile", - new=lambda path: {"profiles.yml": False}.get( - os.path.basename(path), original_isfile(path) - ), - ): - with pytest.raises(DbtRuntimeError) as error: - run_dbt(["init", "--profile", "test"], expect_pass=False) - assert "Could not find profile named invalid" in str(error) - - manager.assert_has_calls( - [ - call.prompt("Enter a name for your project (letters, digits, underscore)"), - ] - ) From a676c4e538d1e20c7d92d6989fcfbace531ad6ac Mon Sep 17 00:00:00 2001 From: Michelle Ark <MichelleArk@users.noreply.github.com> Date: Thu, 12 Sep 2024 16:18:49 -0400 Subject: [PATCH 096/114] dbt-postgres 'microbatch' strategy (#146) --- .changes/unreleased/Features-20240911-141416.yaml | 6 ++++++ dbt/adapters/postgres/impl.py | 2 +- .../materializations/incremental_strategies.sql | 11 +++++++++++ .../functional/adapter/test_incremental_microbatch.py | 7 +++++++ 4 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Features-20240911-141416.yaml create mode 100644 tests/functional/adapter/test_incremental_microbatch.py diff --git a/.changes/unreleased/Features-20240911-141416.yaml b/.changes/unreleased/Features-20240911-141416.yaml new file mode 100644 index 000000000..990a09d9f --- /dev/null +++ b/.changes/unreleased/Features-20240911-141416.yaml @@ -0,0 +1,6 @@ +kind: Features +body: 'Microbatch incremental strategy implementation: merge' +time: 2024-09-11T14:14:16.538536-04:00 +custom: + Author: michelleark + Issue: "149" diff --git a/dbt/adapters/postgres/impl.py b/dbt/adapters/postgres/impl.py index d49d334b2..b8d4f43d7 100644 --- a/dbt/adapters/postgres/impl.py +++ b/dbt/adapters/postgres/impl.py @@ -151,7 +151,7 @@ def valid_incremental_strategies(self): """The set of standard builtin strategies which this adapter supports out-of-the-box. Not used to validate custom strategies defined by end users. """ - return ["append", "delete+insert", "merge"] + return ["append", "delete+insert", "merge", "microbatch"] def debug_query(self): self.execute("select 1 as id") diff --git a/dbt/include/postgres/macros/materializations/incremental_strategies.sql b/dbt/include/postgres/macros/materializations/incremental_strategies.sql index f2fbf41e0..1d37366fd 100644 --- a/dbt/include/postgres/macros/materializations/incremental_strategies.sql +++ b/dbt/include/postgres/macros/materializations/incremental_strategies.sql @@ -7,3 +7,14 @@ {% endif %} {% endmacro %} + + +{% macro postgres__get_incremental_microbatch_sql(arg_dict) %} + + {% if arg_dict["unique_key"] %} + {% do return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) %} + {% else %} + {{ exceptions.raise_compiler_error("dbt-postgres 'microbatch' requires a `unique_key` config") }} + {% endif %} + +{% endmacro %} diff --git a/tests/functional/adapter/test_incremental_microbatch.py b/tests/functional/adapter/test_incremental_microbatch.py new file mode 100644 index 000000000..ce5855b65 --- /dev/null +++ b/tests/functional/adapter/test_incremental_microbatch.py @@ -0,0 +1,7 @@ +from dbt.tests.adapter.incremental.test_incremental_microbatch import ( + BaseMicrobatch, +) + + +class TestPostgresMicrobatch(BaseMicrobatch): + pass From d69a12f27a62a6ccc0366b2f9e7155938337b384 Mon Sep 17 00:00:00 2001 From: Gerda Shank <gerda@dbtlabs.com> Date: Fri, 20 Sep 2024 16:33:06 -0400 Subject: [PATCH 097/114] Allow configuring snapshot table column names (#145) --- .changes/unreleased/Features-20240903-160221.yaml | 6 ++++++ .../postgres/macros/materializations/snapshot_merge.sql | 8 +++++--- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 .changes/unreleased/Features-20240903-160221.yaml diff --git a/.changes/unreleased/Features-20240903-160221.yaml b/.changes/unreleased/Features-20240903-160221.yaml new file mode 100644 index 000000000..8409e2486 --- /dev/null +++ b/.changes/unreleased/Features-20240903-160221.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Allow configuring snapshot column names +time: 2024-09-03T16:02:21.069085-04:00 +custom: + Author: gshank + Issue: "144" diff --git a/dbt/include/postgres/macros/materializations/snapshot_merge.sql b/dbt/include/postgres/macros/materializations/snapshot_merge.sql index 807c70b6c..894ea4b49 100644 --- a/dbt/include/postgres/macros/materializations/snapshot_merge.sql +++ b/dbt/include/postgres/macros/materializations/snapshot_merge.sql @@ -2,12 +2,14 @@ {% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%} {%- set insert_cols_csv = insert_cols | join(', ') -%} + {%- set columns = config.get("snapshot_table_column_names") or get_snapshot_table_column_names() -%} + update {{ target }} - set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to + set {{ columns.dbt_valid_to }} = DBT_INTERNAL_SOURCE.{{ columns.dbt_valid_to }} from {{ source }} as DBT_INTERNAL_SOURCE - where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text + where DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }}::text = {{ target }}.{{ columns.dbt_scd_id }}::text and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text) - and {{ target }}.dbt_valid_to is null; + and {{ target }}.{{ columns.dbt_valid_to }} is null; insert into {{ target }} ({{ insert_cols_csv }}) select {% for column in insert_cols -%} diff --git a/pyproject.toml b/pyproject.toml index e6848a4f7..726dc64a1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ classifiers = [ ] dependencies = [ "psycopg2-binary>=2.9,<3.0", - "dbt-adapters>=1.1.1,<2.0", + "dbt-adapters>=1.7.0,<2.0", # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency "dbt-core>=1.8.0", # installed via dbt-adapters but used directly From 2262a854e0b81d99783ce8213332a0c7a5a7f7cd Mon Sep 17 00:00:00 2001 From: GitHub Build Bot <buildbot@fishtownanalytics.com> Date: Wed, 25 Sep 2024 16:11:12 +0000 Subject: [PATCH 098/114] Generate changelog at .changes/1.9.0-b1.md --- .changes/1.9.0-b1.md | 31 ++++++++++++++++++ .../Features-20240430-185700.yaml | 0 .../Features-20240501-151856.yaml | 0 .../Features-20240731-210800.yaml | 0 .../Features-20240903-160221.yaml | 0 .../Features-20240911-141416.yaml | 0 .../Fixes-20240514-193201.yaml | 0 .../Fixes-20240605-202614.yaml | 0 .../Fixes-20240626-163930.yaml | 0 .../Under the Hood-20240716-172442.yaml | 0 .../Under the Hood-20240731-075011.yaml | 0 CHANGELOG.md | 32 +++++++++++++++++++ 12 files changed, 63 insertions(+) create mode 100644 .changes/1.9.0-b1.md rename .changes/{unreleased => 1.9.0}/Features-20240430-185700.yaml (100%) rename .changes/{unreleased => 1.9.0}/Features-20240501-151856.yaml (100%) rename .changes/{unreleased => 1.9.0}/Features-20240731-210800.yaml (100%) rename .changes/{unreleased => 1.9.0}/Features-20240903-160221.yaml (100%) rename .changes/{unreleased => 1.9.0}/Features-20240911-141416.yaml (100%) rename .changes/{unreleased => 1.9.0}/Fixes-20240514-193201.yaml (100%) rename .changes/{unreleased => 1.9.0}/Fixes-20240605-202614.yaml (100%) rename .changes/{unreleased => 1.9.0}/Fixes-20240626-163930.yaml (100%) rename .changes/{unreleased => 1.9.0}/Under the Hood-20240716-172442.yaml (100%) rename .changes/{unreleased => 1.9.0}/Under the Hood-20240731-075011.yaml (100%) diff --git a/.changes/1.9.0-b1.md b/.changes/1.9.0-b1.md new file mode 100644 index 000000000..9a06cee40 --- /dev/null +++ b/.changes/1.9.0-b1.md @@ -0,0 +1,31 @@ +## dbt-postgres 1.9.0-b1 - September 25, 2024 + +### Features + +- Add tests for cross-database `cast` macro ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76)) +- Cross-database `date` macro ([#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) +- Add support for Python 3.12 ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17)) +- Allow configuring snapshot column names ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) +- Microbatch incremental strategy implementation: merge ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) + +### Fixes + +- Fix the semicolon semantics for indexes while respecting other bug fix ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) +- Default to psycopg2-binary and allow overriding to psycopg2 via DBT_PSYCOPG2_NAME (restores previous behavior) ([#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) +- Fix `persist_docs` for `materialized_view` materializations. Previously, using this configuration with materialized view models would lead to an error. ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) + +### Under the Hood + +- Add support for experimental record/replay testing. ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) +- Updating changie.yaml to add contributors and PR links ([#109](https://github.com/dbt-labs/dbt-postgres/issues/109)) + +### Contributors +- [@dbeatty10](https://github.com/dbeatty10) ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76), [#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) +- [@gshank](https://github.com/gshank) ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) +- [@leahwicz](https://github.com/leahwicz) ([#109](https://github.com/dbt-labs/dbt-postgres/issues/109)) +- [@michelleark](https://github.com/michelleark) ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) +- [@mikealfare](https://github.com/mikealfare) ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17), [#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) +- [@morsapaes](https://github.com/morsapaes) ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) +- [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) +- [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) + diff --git a/.changes/unreleased/Features-20240430-185700.yaml b/.changes/1.9.0/Features-20240430-185700.yaml similarity index 100% rename from .changes/unreleased/Features-20240430-185700.yaml rename to .changes/1.9.0/Features-20240430-185700.yaml diff --git a/.changes/unreleased/Features-20240501-151856.yaml b/.changes/1.9.0/Features-20240501-151856.yaml similarity index 100% rename from .changes/unreleased/Features-20240501-151856.yaml rename to .changes/1.9.0/Features-20240501-151856.yaml diff --git a/.changes/unreleased/Features-20240731-210800.yaml b/.changes/1.9.0/Features-20240731-210800.yaml similarity index 100% rename from .changes/unreleased/Features-20240731-210800.yaml rename to .changes/1.9.0/Features-20240731-210800.yaml diff --git a/.changes/unreleased/Features-20240903-160221.yaml b/.changes/1.9.0/Features-20240903-160221.yaml similarity index 100% rename from .changes/unreleased/Features-20240903-160221.yaml rename to .changes/1.9.0/Features-20240903-160221.yaml diff --git a/.changes/unreleased/Features-20240911-141416.yaml b/.changes/1.9.0/Features-20240911-141416.yaml similarity index 100% rename from .changes/unreleased/Features-20240911-141416.yaml rename to .changes/1.9.0/Features-20240911-141416.yaml diff --git a/.changes/unreleased/Fixes-20240514-193201.yaml b/.changes/1.9.0/Fixes-20240514-193201.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240514-193201.yaml rename to .changes/1.9.0/Fixes-20240514-193201.yaml diff --git a/.changes/unreleased/Fixes-20240605-202614.yaml b/.changes/1.9.0/Fixes-20240605-202614.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240605-202614.yaml rename to .changes/1.9.0/Fixes-20240605-202614.yaml diff --git a/.changes/unreleased/Fixes-20240626-163930.yaml b/.changes/1.9.0/Fixes-20240626-163930.yaml similarity index 100% rename from .changes/unreleased/Fixes-20240626-163930.yaml rename to .changes/1.9.0/Fixes-20240626-163930.yaml diff --git a/.changes/unreleased/Under the Hood-20240716-172442.yaml b/.changes/1.9.0/Under the Hood-20240716-172442.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240716-172442.yaml rename to .changes/1.9.0/Under the Hood-20240716-172442.yaml diff --git a/.changes/unreleased/Under the Hood-20240731-075011.yaml b/.changes/1.9.0/Under the Hood-20240731-075011.yaml similarity index 100% rename from .changes/unreleased/Under the Hood-20240731-075011.yaml rename to .changes/1.9.0/Under the Hood-20240731-075011.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 5beb02ea0..fc95b95f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,38 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). +## dbt-postgres 1.9.0-b1 - September 25, 2024 + +### Features + +- Add tests for cross-database `cast` macro ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76)) +- Cross-database `date` macro ([#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) +- Add support for Python 3.12 ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17)) +- Allow configuring snapshot column names ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) +- Microbatch incremental strategy implementation: merge ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) + +### Fixes + +- Fix the semicolon semantics for indexes while respecting other bug fix ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) +- Default to psycopg2-binary and allow overriding to psycopg2 via DBT_PSYCOPG2_NAME (restores previous behavior) ([#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) +- Fix `persist_docs` for `materialized_view` materializations. Previously, using this configuration with materialized view models would lead to an error. ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) + +### Under the Hood + +- Add support for experimental record/replay testing. ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) +- Updating changie.yaml to add contributors and PR links ([#109](https://github.com/dbt-labs/dbt-postgres/issues/109)) + +### Contributors +- [@dbeatty10](https://github.com/dbeatty10) ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76), [#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) +- [@gshank](https://github.com/gshank) ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) +- [@leahwicz](https://github.com/leahwicz) ([#109](https://github.com/dbt-labs/dbt-postgres/issues/109)) +- [@michelleark](https://github.com/michelleark) ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) +- [@mikealfare](https://github.com/mikealfare) ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17), [#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) +- [@morsapaes](https://github.com/morsapaes) ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) +- [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) +- [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) + + ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.8](https://github.com/dbt-labs/dbt-postgres/blob/1.8.latest/CHANGELOG.md) From 988e9959580434461706fdfafaca40f3bca877d0 Mon Sep 17 00:00:00 2001 From: GitHub Build Bot <buildbot@fishtownanalytics.com> Date: Wed, 25 Sep 2024 16:12:47 +0000 Subject: [PATCH 099/114] Bump version to 1.9.0b1 --- dbt/adapters/postgres/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index 6698ed64c..a4077fff2 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.9.0a1" +version = "1.9.0b1" From cb4a95392fe3156bf7bdf8e8e71d4fa3fab07675 Mon Sep 17 00:00:00 2001 From: Kshitij Aranke <kshitij@aranke.org> Date: Tue, 1 Oct 2024 18:54:02 +0100 Subject: [PATCH 100/114] Fix hook tests (#153) --- .changes/1.9.0-b1.md | 1 - tests/functional/schema_tests/test_schema_v2_tests.py | 8 ++++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.changes/1.9.0-b1.md b/.changes/1.9.0-b1.md index 9a06cee40..59bd21c56 100644 --- a/.changes/1.9.0-b1.md +++ b/.changes/1.9.0-b1.md @@ -28,4 +28,3 @@ - [@morsapaes](https://github.com/morsapaes) ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) - [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) - [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) - diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py index aae164a1d..68158efd7 100644 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -472,9 +472,9 @@ def test_hooks_do_run_for_tests( ): # This passes now that hooks run, a behavior we changed in v1.0 results = run_dbt(["test", "--model", "ephemeral"]) - assert len(results) == 1 + assert len(results) == 3 for result in results: - assert result.status == "pass" + assert result.status in ("pass", "success") assert not result.skipped assert result.failures == 0, "test {} failed".format(result.node.name) @@ -505,9 +505,9 @@ def test_these_hooks_dont_run_for_tests( ): # This would fail if the hooks ran results = run_dbt(["test", "--model", "ephemeral"]) - assert len(results) == 1 + assert len(results) == 3 for result in results: - assert result.status == "pass" + assert result.status in ("pass", "success") assert not result.skipped assert result.failures == 0, "test {} failed".format(result.node.name) From ed94c0d2e7a73b1b2b3d34643c71c02b3c4e1147 Mon Sep 17 00:00:00 2001 From: Gerda Shank <gerda@dbtlabs.com> Date: Thu, 10 Oct 2024 17:00:38 -0400 Subject: [PATCH 101/114] Enable setting dbt_valid_to snapshot column to new setting dbt_valid_to_current (#152) Co-authored-by: Kshitij Aranke <kshitij@aranke.org> --- .changes/unreleased/Features-20240927-133708.yaml | 6 ++++++ .../postgres/macros/materializations/snapshot_merge.sql | 7 ++++++- 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 .changes/unreleased/Features-20240927-133708.yaml diff --git a/.changes/unreleased/Features-20240927-133708.yaml b/.changes/unreleased/Features-20240927-133708.yaml new file mode 100644 index 000000000..b2dba3337 --- /dev/null +++ b/.changes/unreleased/Features-20240927-133708.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Enable setting current value of dbt_valid_to +time: 2024-09-27T13:37:08.808843-04:00 +custom: + Author: gshank + Issue: "151" diff --git a/dbt/include/postgres/macros/materializations/snapshot_merge.sql b/dbt/include/postgres/macros/materializations/snapshot_merge.sql index 894ea4b49..0b4deb1b6 100644 --- a/dbt/include/postgres/macros/materializations/snapshot_merge.sql +++ b/dbt/include/postgres/macros/materializations/snapshot_merge.sql @@ -9,7 +9,12 @@ from {{ source }} as DBT_INTERNAL_SOURCE where DBT_INTERNAL_SOURCE.{{ columns.dbt_scd_id }}::text = {{ target }}.{{ columns.dbt_scd_id }}::text and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text) - and {{ target }}.{{ columns.dbt_valid_to }} is null; + {% if config.get("dbt_valid_to_current") %} + and ({{ target }}.{{ columns.dbt_valid_to }} = {{ config.get('dbt_valid_to_current') }} or {{ target }}.{{ columns.dbt_valid_to }} is null); + {% else %} + and {{ target }}.{{ columns.dbt_valid_to }} is null; + {% endif %} + insert into {{ target }} ({{ insert_cols_csv }}) select {% for column in insert_cols -%} From 20cdf8fa2904472cd9cf37dc0b1137737d67178d Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Wed, 16 Oct 2024 18:42:59 -0400 Subject: [PATCH 102/114] Remove tests that are covered in dbt-core (#162) --- tests/functional/logging/test_logging.py | 98 ------------------- tests/functional/logging/test_meta_logging.py | 46 --------- 2 files changed, 144 deletions(-) delete mode 100644 tests/functional/logging/test_logging.py delete mode 100644 tests/functional/logging/test_meta_logging.py diff --git a/tests/functional/logging/test_logging.py b/tests/functional/logging/test_logging.py deleted file mode 100644 index a7e226eb3..000000000 --- a/tests/functional/logging/test_logging.py +++ /dev/null @@ -1,98 +0,0 @@ -import json -import os - -from dbt.events.types import InvalidOptionYAML -from dbt.tests.util import get_manifest, run_dbt, read_file -from dbt_common.events.functions import fire_event -import pytest - - -@pytest.fixture(scope="class") -def models(): - return {"my_model.sql": "select 1 as fun"} - - -# This test checks that various events contain node_info, -# which is supplied by the log_contextvars context manager -def test_basic(project, logs_dir): - results = run_dbt(["--log-format=json", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - assert "model.test.my_model" in manifest.nodes - - # get log file - log_file = read_file(logs_dir, "dbt.log") - assert log_file - node_start = False - node_finished = False - connection_reused_data = [] - for log_line in log_file.split("\n"): - # skip empty lines - if len(log_line) == 0: - continue - # The adapter logging also shows up, so skip non-json lines - if "[debug]" in log_line: - continue - log_dct = json.loads(log_line) - log_data = log_dct["data"] - log_event = log_dct["info"]["name"] - if log_event == "ConnectionReused": - connection_reused_data.append(log_data) - if log_event == "NodeStart": - node_start = True - if log_event == "NodeFinished": - node_finished = True - assert log_data["run_result"]["adapter_response"] - if node_start and not node_finished: - if log_event == "NodeExecuting": - assert "node_info" in log_data - if log_event == "JinjaLogDebug": - assert "node_info" in log_data - if log_event == "SQLQuery": - assert "node_info" in log_data - if log_event == "TimingInfoCollected": - assert "node_info" in log_data - assert "timing_info" in log_data - - # windows doesn't have the same thread/connection flow so the ConnectionReused - # events don't show up - if os.name != "nt": - # Verify the ConnectionReused event occurs and has the right data - assert connection_reused_data - for data in connection_reused_data: - assert "conn_name" in data and data["conn_name"] - assert "orig_conn_name" in data and data["orig_conn_name"] - - -def test_formatted_logs(project, logs_dir): - # a basic run of dbt with a single model should have 5 `Formatting` events in the json logs - results = run_dbt(["--log-format=json", "run"]) - assert len(results) == 1 - - # get log file - json_log_file = read_file(logs_dir, "dbt.log") - formatted_json_lines = 0 - for log_line in json_log_file.split("\n"): - # skip the empty line at the end - if len(log_line) == 0: - continue - log_dct = json.loads(log_line) - log_event = log_dct["info"]["name"] - if log_event == "Formatting": - formatted_json_lines += 1 - - assert formatted_json_lines == 5 - - -def test_invalid_event_value(project, logs_dir): - results = run_dbt(["--log-format=json", "run"]) - assert len(results) == 1 - with pytest.raises(Exception): - # This should raise because positional arguments are provided to the event - fire_event(InvalidOptionYAML("testing")) - - # Provide invalid type to "option_name" - with pytest.raises(Exception) as excinfo: - fire_event(InvalidOptionYAML(option_name=1)) - - assert str(excinfo.value) == "[InvalidOptionYAML]: Unable to parse dict {'option_name': 1}" diff --git a/tests/functional/logging/test_meta_logging.py b/tests/functional/logging/test_meta_logging.py deleted file mode 100644 index 7c535bce7..000000000 --- a/tests/functional/logging/test_meta_logging.py +++ /dev/null @@ -1,46 +0,0 @@ -import json - -from dbt.tests.util import read_file, run_dbt -import pytest - - -model1 = "select 1 as fun" -model2 = '{{ config(meta={"owners": ["team1", "team2"]})}} select 1 as fun' -model3 = '{{ config(meta={"key": 1})}} select 1 as fun' - - -@pytest.fixture(scope="class") # noqa -def models(): - return {"model1.sql": model1, "model2.sql": model2, "model3.sql": model3} - - -# This test checks that various events contain node_info, -# which is supplied by the log_contextvars context manager -def test_meta(project, logs_dir): - run_dbt(["--log-format=json", "run"]) - - # get log file - log_file = read_file(logs_dir, "dbt.log") - assert log_file - - for log_line in log_file.split("\n"): - # skip empty lines - if len(log_line) == 0: - continue - # The adapter logging also shows up, so skip non-json lines - if "[debug]" in log_line: - continue - - log_dct = json.loads(log_line) - if "node_info" not in log_dct["data"]: - continue - - print(f"--- log_dct: {log_dct}") - node_info = log_dct["data"]["node_info"] - node_path = node_info["node_path"] - if node_path == "model1.sql": - assert node_info["meta"] == {} - elif node_path == "model2.sql": - assert node_info["meta"] == {"owners": ["team1", "team2"]} - elif node_path == "model3.sql": - assert node_info["meta"] == {"key": 1} From 352244812ec913ea342c1c86d7161fdb75f1e9b4 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 17 Oct 2024 09:12:57 -0400 Subject: [PATCH 103/114] Drop support for Python 3.8 (#161) --- .../Breaking Changes-20241016-175527.yaml | 6 ++ .github/workflows/integration-tests.yml | 4 +- .github/workflows/unit-tests.yml | 2 +- .pre-commit-config.yaml | 1 - docker/Dockerfile | 4 +- docker/README.md | 9 ++- docker/dev.Dockerfile | 58 +++++++++---------- pyproject.toml | 6 +- 8 files changed, 46 insertions(+), 44 deletions(-) create mode 100644 .changes/unreleased/Breaking Changes-20241016-175527.yaml diff --git a/.changes/unreleased/Breaking Changes-20241016-175527.yaml b/.changes/unreleased/Breaking Changes-20241016-175527.yaml new file mode 100644 index 000000000..d1e4df8fb --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20241016-175527.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Drop support for Python 3.8 +time: 2024-10-16T17:55:27.844499-04:00 +custom: + Author: mikealfare + Issue: "161" diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 374908f46..0e5f41273 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -82,7 +82,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] services: postgres: @@ -148,7 +148,7 @@ jobs: fail-fast: false matrix: platform: [ubuntu-22.04, macos-12] - python-version: ["3.8", "3.12"] + python-version: ["3.9", "3.12"] steps: - name: "Check out repository" uses: actions/checkout@v4 diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index afb88136c..5805c9821 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Check out repository diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0bd01f7cc..688eb244a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -23,7 +23,6 @@ repos: - id: black args: - --line-length=99 - - --target-version=py38 - --target-version=py39 - --target-version=py310 - --target-version=py311 diff --git a/docker/Dockerfile b/docker/Dockerfile index b6a87dfc6..cfbc81aed 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,7 +1,7 @@ # this image gets published to GHCR for production use ARG py_version=3.12.4 -FROM python:$py_version-slim-bullseye as base +FROM python:$py_version-slim-bullseye AS base RUN apt-get update \ && apt-get dist-upgrade -y \ @@ -25,7 +25,7 @@ ENV LANG=C.UTF-8 RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir -FROM base as dbt-postgres +FROM base AS dbt-postgres ARG commit_ref=main diff --git a/docker/README.md b/docker/README.md index 22af3fe93..f571eebda 100644 --- a/docker/README.md +++ b/docker/README.md @@ -24,20 +24,19 @@ docker build --tag <your_image_name> \ ``` ### Examples: -To build an image named "my-dbt" that supports Snowflake using the latest releases: +To build an image named "my-dbt" that supports Postgres using the latest releases: ```shell -cd dbt-core/docker docker build --tag my-dbt --target dbt-postgres . ``` -To build an image named "my-other-dbt" that supports Snowflake using the adapter version 1.0.0b1: +To build an image named "my-other-dbt" that supports Postgres using the adapter version 1.8.0: ```shell cd dbt-core/docker docker build \ --tag my-other-dbt \ --target dbt-postgres \ - --build-arg commit_ref=v1.0.0b1 \ - . + --build-arg commit_ref=v1.8.0 \ + . ``` ## Running an image in a container: diff --git a/docker/dev.Dockerfile b/docker/dev.Dockerfile index e137ff846..280ff39f7 100644 --- a/docker/dev.Dockerfile +++ b/docker/dev.Dockerfile @@ -1,47 +1,47 @@ # this image does not get published, it is intended for local development only, see `Makefile` for usage -FROM ubuntu:24.04 as base +FROM ubuntu:24.04 AS base # prevent python installation from asking for time zone region ARG DEBIAN_FRONTEND=noninteractive # add python repository RUN apt-get update \ - && apt-get install -y software-properties-common=0.99.22.9 \ - && add-apt-repository -y ppa:deadsnakes/ppa \ - && apt-get clean \ - && rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* + && apt-get install -y software-properties-common=0.99.48 \ + && add-apt-repository -y ppa:deadsnakes/ppa \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* # install python RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - build-essential=12.9ubuntu3 \ - git-all=1:2.34.1-1ubuntu1.10 \ - libpq-dev=14.11-0ubuntu0.22.04.1 \ - python3.8=3.8.19-1+jammy1 \ - python3.8-dev=3.8.19-1+jammy1 \ - python3.8-distutils=3.8.19-1+jammy1 \ - python3.8-venv=3.8.19-1+jammy1 \ - python3-pip=22.0.2+dfsg-1ubuntu0.4 \ - python3-wheel=0.37.1-2ubuntu0.22.04.1 \ - && apt-get clean \ - && rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* + && apt-get install -y --no-install-recommends \ + build-essential=12.10ubuntu1 \ + git-all=1:2.43.0-1ubuntu7.1 \ + libpq-dev=16.4-0ubuntu0.24.04.2 \ + python3.9=3.9.20-1+noble1 \ + python3.9-dev=3.9.20-1+noble1 \ + python3.9-distutils=3.9.20-1+noble1 \ + python3.9-venv=3.9.20-1+noble1 \ + python3-pip=24.0+dfsg-1ubuntu1 \ + python3-wheel=0.42.0-2 \ + && apt-get clean \ + && rm -rf \ + /var/lib/apt/lists/* \ + /tmp/* \ + /var/tmp/* # update the default system interpreter to the newly installed version -RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.8 1 +RUN update-alternatives --install /usr/bin/python python /usr/bin/python3.9 1 # install python dependencies -RUN python3 -m pip install --upgrade --no-cache-dir "hatch==1.9.1" +RUN python -m pip install --upgrade "hatch==1.13.0" --no-cache-dir --compile -FROM base as dbt-postgres-dev +FROM base AS dbt-postgres-dev -HEALTHCHECK CMD python3 --version || exit 1 +HEALTHCHECK CMD python --version || exit 1 # send stdout/stderr to terminal ENV PYTHONUNBUFFERED=1 @@ -50,5 +50,5 @@ ENV PYTHONUNBUFFERED=1 WORKDIR /opt/code VOLUME /opt/code -# create a virtual environment -RUN python3 -m venv /opt/venv +# setup hatch virtual envs +RUN hatch config set dirs.env.virtual ".hatch" diff --git a/pyproject.toml b/pyproject.toml index 726dc64a1..f3aa52c1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ name = "dbt-postgres" description = "The set of adapter protocols and base functionality that supports integration with dbt-core" readme = "README.md" keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "postgres"] -requires-python = ">=3.8.0" +requires-python = ">=3.9.0" authors = [ { name = "dbt Labs", email = "info@dbtlabs.com" }, ] @@ -17,7 +17,6 @@ classifiers = [ "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -58,8 +57,7 @@ dependencies = [ "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", - 'pre-commit==3.7.0;python_version>="3.9"', - 'pre-commit==3.5.0;python_version=="3.8"', + "pre-commit==3.7.0", "freezegun", "pytest", "pytest-dotenv", From 80483a0162b7a557054b09eef4f6d86efda3694d Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Thu, 17 Oct 2024 20:05:03 -0400 Subject: [PATCH 104/114] Remove duplicate tests that should exist in dbt-core (#166) --- tests/functional/schema_tests/data/seed.sql | 117 -- .../schema_tests/data/seed_failure.sql | 116 -- tests/functional/schema_tests/fixtures.py | 1275 ----------------- .../schema_tests/test_schema_v2_tests.py | 1110 -------------- 4 files changed, 2618 deletions(-) delete mode 100644 tests/functional/schema_tests/data/seed.sql delete mode 100644 tests/functional/schema_tests/data/seed_failure.sql delete mode 100644 tests/functional/schema_tests/fixtures.py delete mode 100644 tests/functional/schema_tests/test_schema_v2_tests.py diff --git a/tests/functional/schema_tests/data/seed.sql b/tests/functional/schema_tests/data/seed.sql deleted file mode 100644 index 8f1801504..000000000 --- a/tests/functional/schema_tests/data/seed.sql +++ /dev/null @@ -1,117 +0,0 @@ -create table {schema}.seed ( - favorite_color VARCHAR(10), - id INTEGER, - first_name VARCHAR(11), - email VARCHAR(31), - - net_worth NUMERIC(12, 2) DEFAULT '100.00', - fav_number NUMERIC DEFAULT '3.14159265', - - ip_address VARCHAR(15), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed - ("favorite_color", "id","first_name","email","ip_address","updated_at") -VALUES - ('blue', 1,'Larry',null,'69.135.206.194','2008-09-12 19:08:31'), - ('blue', 2,'Larry',null,'64.210.133.162','1978-05-09 04:15:14'), - ('blue', 3,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), - ('blue', 4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), - ('blue', 5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), - ('blue', 6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), - ('blue', 7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), - ('blue', 8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), - ('blue', 9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), - ('blue', 10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), - ('blue', 11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), - ('blue', 12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), - ('blue', 13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), - ('blue', 14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), - ('blue', 15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), - ('blue', 16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), - ('blue', 17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), - ('blue', 18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), - ('blue', 19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), - ('blue', 20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), - ('blue', 21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), - ('blue', 22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), - ('blue', 23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), - ('blue', 24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), - ('blue', 25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), - ('blue', 26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), - ('blue', 27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), - ('blue', 28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), - ('blue', 29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), - ('blue', 30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), - ('blue', 31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), - ('blue', 32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), - ('blue', 33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), - ('blue', 34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), - ('blue', 35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), - ('blue', 36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), - ('blue', 37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), - ('blue', 38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), - ('blue', 39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), - ('blue', 40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), - ('blue', 41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), - ('blue', 42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), - ('blue', 43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), - ('blue', 44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), - ('blue', 45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), - ('blue', 46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), - ('blue', 47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), - ('blue', 48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), - ('blue', 49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), - ('blue', 50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), - ('green', 51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), - ('green', 52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), - ('green', 53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), - ('green', 54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), - ('green', 55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), - ('green', 56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), - ('green', 57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), - ('green', 58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), - ('green', 59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), - ('green', 60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), - ('green', 61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), - ('green', 62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), - ('green', 63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), - ('green', 64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), - ('green', 65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), - ('green', 66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), - ('green', 67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), - ('green', 68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), - ('green', 69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), - ('green', 70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), - ('green', 71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), - ('green', 72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), - ('green', 73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), - ('green', 74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), - ('green', 75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), - ('green', 76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), - ('green', 77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), - ('green', 78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), - ('green', 79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), - ('green', 80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), - ('green', 81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), - ('green', 82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), - ('green', 83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), - ('green', 84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), - ('green', 85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), - ('green', 86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), - ('green', 87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), - ('green', 88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), - ('green', 89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), - ('green', 90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), - ('green', 91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), - ('green', 92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), - ('green', 93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), - ('green', 94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), - ('green', 95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), - ('green', 96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), - ('green', 97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), - ('green', 98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), - ('green', 99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), - ('green', 100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'); diff --git a/tests/functional/schema_tests/data/seed_failure.sql b/tests/functional/schema_tests/data/seed_failure.sql deleted file mode 100644 index f68c4591a..000000000 --- a/tests/functional/schema_tests/data/seed_failure.sql +++ /dev/null @@ -1,116 +0,0 @@ -create table {schema}.seed_failure ( - favorite_color VARCHAR(10), - id INTEGER, - first_name VARCHAR(11), - email VARCHAR(31), - ip_address VARCHAR(15), - updated_at TIMESTAMP WITHOUT TIME ZONE -); - - -INSERT INTO {schema}.seed_failure - ("favorite_color", "id","first_name","email","ip_address","updated_at") -VALUES - -- unaccepted 'red' favorite_color - ('red', 1,'Larry','lking0@miitbeian.gov.cn','69.135.206.194','2008-09-12 19:08:31'), - -- dupicate unique field (id=1) - ('blue', 1,'Larry','lperkins1@toplist.cz','64.210.133.162','1978-05-09 04:15:14'), - -- null not_null field (id) - ('blue', null,'Anna','amontgomery2@miitbeian.gov.cn','168.104.64.114','2011-10-16 04:07:57'), - ('blue', 4,'Sandra','sgeorge3@livejournal.com','229.235.252.98','1973-07-19 10:52:43'), - ('blue', 5,'Fred','fwoods4@google.cn','78.229.170.124','2012-09-30 16:38:29'), - ('blue', 6,'Stephen','shanson5@livejournal.com','182.227.157.105','1995-11-07 21:40:50'), - ('blue', 7,'William','wmartinez6@upenn.edu','135.139.249.50','1982-09-05 03:11:59'), - ('blue', 8,'Jessica','jlong7@hao123.com','203.62.178.210','1991-10-16 11:03:15'), - ('blue', 9,'Douglas','dwhite8@tamu.edu','178.187.247.1','1979-10-01 09:49:48'), - ('blue', 10,'Lisa','lcoleman9@nydailynews.com','168.234.128.249','2011-05-26 07:45:49'), - ('blue', 11,'Ralph','rfieldsa@home.pl','55.152.163.149','1972-11-18 19:06:11'), - ('blue', 12,'Louise','lnicholsb@samsung.com','141.116.153.154','2014-11-25 20:56:14'), - ('blue', 13,'Clarence','cduncanc@sfgate.com','81.171.31.133','2011-11-17 07:02:36'), - ('blue', 14,'Daniel','dfranklind@omniture.com','8.204.211.37','1980-09-13 00:09:04'), - ('blue', 15,'Katherine','klanee@auda.org.au','176.96.134.59','1997-08-22 19:36:56'), - ('blue', 16,'Billy','bwardf@wikia.com','214.108.78.85','2003-10-19 02:14:47'), - ('blue', 17,'Annie','agarzag@ocn.ne.jp','190.108.42.70','1988-10-28 15:12:35'), - ('blue', 18,'Shirley','scolemanh@fastcompany.com','109.251.164.84','1988-08-24 10:50:57'), - ('blue', 19,'Roger','rfrazieri@scribd.com','38.145.218.108','1985-12-31 15:17:15'), - ('blue', 20,'Lillian','lstanleyj@goodreads.com','47.57.236.17','1970-06-08 02:09:05'), - ('blue', 21,'Aaron','arodriguezk@nps.gov','205.245.118.221','1985-10-11 23:07:49'), - ('blue', 22,'Patrick','pparkerl@techcrunch.com','19.8.100.182','2006-03-29 12:53:56'), - ('blue', 23,'Phillip','pmorenom@intel.com','41.38.254.103','2011-11-07 15:35:43'), - ('blue', 24,'Henry','hgarcian@newsvine.com','1.191.216.252','2008-08-28 08:30:44'), - ('blue', 25,'Irene','iturnero@opera.com','50.17.60.190','1994-04-01 07:15:02'), - ('blue', 26,'Andrew','adunnp@pen.io','123.52.253.176','2000-11-01 06:03:25'), - ('blue', 27,'David','dgutierrezq@wp.com','238.23.203.42','1988-01-25 07:29:18'), - ('blue', 28,'Henry','hsanchezr@cyberchimps.com','248.102.2.185','1983-01-01 13:36:37'), - ('blue', 29,'Evelyn','epetersons@gizmodo.com','32.80.46.119','1979-07-16 17:24:12'), - ('blue', 30,'Tammy','tmitchellt@purevolume.com','249.246.167.88','2001-04-03 10:00:23'), - ('blue', 31,'Jacqueline','jlittleu@domainmarket.com','127.181.97.47','1986-02-11 21:35:50'), - ('blue', 32,'Earl','eortizv@opera.com','166.47.248.240','1996-07-06 08:16:27'), - ('blue', 33,'Juan','jgordonw@sciencedirect.com','71.77.2.200','1987-01-31 03:46:44'), - ('blue', 34,'Diane','dhowellx@nyu.edu','140.94.133.12','1994-06-11 02:30:05'), - ('blue', 35,'Randy','rkennedyy@microsoft.com','73.255.34.196','2005-05-26 20:28:39'), - ('blue', 36,'Janice','jriveraz@time.com','22.214.227.32','1990-02-09 04:16:52'), - ('blue', 37,'Laura','lperry10@diigo.com','159.148.145.73','2015-03-17 05:59:25'), - ('blue', 38,'Gary','gray11@statcounter.com','40.193.124.56','1970-01-27 10:04:51'), - ('blue', 39,'Jesse','jmcdonald12@typepad.com','31.7.86.103','2009-03-14 08:14:29'), - ('blue', 40,'Sandra','sgonzalez13@goodreads.com','223.80.168.239','1993-05-21 14:08:54'), - ('blue', 41,'Scott','smoore14@archive.org','38.238.46.83','1980-08-30 11:16:56'), - ('blue', 42,'Phillip','pevans15@cisco.com','158.234.59.34','2011-12-15 23:26:31'), - ('blue', 43,'Steven','sriley16@google.ca','90.247.57.68','2011-10-29 19:03:28'), - ('blue', 44,'Deborah','dbrown17@hexun.com','179.125.143.240','1995-04-10 14:36:07'), - ('blue', 45,'Lori','lross18@ow.ly','64.80.162.180','1980-12-27 16:49:15'), - ('blue', 46,'Sean','sjackson19@tumblr.com','240.116.183.69','1988-06-12 21:24:45'), - ('blue', 47,'Terry','tbarnes1a@163.com','118.38.213.137','1997-09-22 16:43:19'), - ('blue', 48,'Dorothy','dross1b@ebay.com','116.81.76.49','2005-02-28 13:33:24'), - ('blue', 49,'Samuel','swashington1c@house.gov','38.191.253.40','1989-01-19 21:15:48'), - ('blue', 50,'Ralph','rcarter1d@tinyurl.com','104.84.60.174','2007-08-11 10:21:49'), - ('green', 51,'Wayne','whudson1e@princeton.edu','90.61.24.102','1983-07-03 16:58:12'), - ('green', 52,'Rose','rjames1f@plala.or.jp','240.83.81.10','1995-06-08 11:46:23'), - ('green', 53,'Louise','lcox1g@theglobeandmail.com','105.11.82.145','2016-09-19 14:45:51'), - ('green', 54,'Kenneth','kjohnson1h@independent.co.uk','139.5.45.94','1976-08-17 11:26:19'), - ('green', 55,'Donna','dbrown1i@amazon.co.uk','19.45.169.45','2006-05-27 16:51:40'), - ('green', 56,'Johnny','jvasquez1j@trellian.com','118.202.238.23','1975-11-17 08:42:32'), - ('green', 57,'Patrick','pramirez1k@tamu.edu','231.25.153.198','1997-08-06 11:51:09'), - ('green', 58,'Helen','hlarson1l@prweb.com','8.40.21.39','1993-08-04 19:53:40'), - ('green', 59,'Patricia','pspencer1m@gmpg.org','212.198.40.15','1977-08-03 16:37:27'), - ('green', 60,'Joseph','jspencer1n@marriott.com','13.15.63.238','2005-07-23 20:22:06'), - ('green', 61,'Phillip','pschmidt1o@blogtalkradio.com','177.98.201.190','1976-05-19 21:47:44'), - ('green', 62,'Joan','jwebb1p@google.ru','105.229.170.71','1972-09-07 17:53:47'), - ('green', 63,'Phyllis','pkennedy1q@imgur.com','35.145.8.244','2000-01-01 22:33:37'), - ('green', 64,'Katherine','khunter1r@smh.com.au','248.168.205.32','1991-01-09 06:40:24'), - ('green', 65,'Laura','lvasquez1s@wiley.com','128.129.115.152','1997-10-23 12:04:56'), - ('green', 66,'Juan','jdunn1t@state.gov','44.228.124.51','2004-11-10 05:07:35'), - ('green', 67,'Judith','jholmes1u@wiley.com','40.227.179.115','1977-08-02 17:01:45'), - ('green', 68,'Beverly','bbaker1v@wufoo.com','208.34.84.59','2016-03-06 20:07:23'), - ('green', 69,'Lawrence','lcarr1w@flickr.com','59.158.212.223','1988-09-13 06:07:21'), - ('green', 70,'Gloria','gwilliams1x@mtv.com','245.231.88.33','1995-03-18 22:32:46'), - ('green', 71,'Steven','ssims1y@cbslocal.com','104.50.58.255','2001-08-05 21:26:20'), - ('green', 72,'Betty','bmills1z@arstechnica.com','103.177.214.220','1981-12-14 21:26:54'), - ('green', 73,'Mildred','mfuller20@prnewswire.com','151.158.8.130','2000-04-19 10:13:55'), - ('green', 74,'Donald','dday21@icq.com','9.178.102.255','1972-12-03 00:58:24'), - ('green', 75,'Eric','ethomas22@addtoany.com','85.2.241.227','1992-11-01 05:59:30'), - ('green', 76,'Joyce','jarmstrong23@sitemeter.com','169.224.20.36','1985-10-24 06:50:01'), - ('green', 77,'Maria','mmartinez24@amazonaws.com','143.189.167.135','2005-10-05 05:17:42'), - ('green', 78,'Harry','hburton25@youtube.com','156.47.176.237','1978-03-26 05:53:33'), - ('green', 79,'Kevin','klawrence26@hao123.com','79.136.183.83','1994-10-12 04:38:52'), - ('green', 80,'David','dhall27@prweb.com','133.149.172.153','1976-12-15 16:24:24'), - ('green', 81,'Kathy','kperry28@twitter.com','229.242.72.228','1979-03-04 02:58:56'), - ('green', 82,'Adam','aprice29@elegantthemes.com','13.145.21.10','1982-11-07 11:46:59'), - ('green', 83,'Brandon','bgriffin2a@va.gov','73.249.128.212','2013-10-30 05:30:36'), - ('green', 84,'Henry','hnguyen2b@discovery.com','211.36.214.242','1985-01-09 06:37:27'), - ('green', 85,'Eric','esanchez2c@edublogs.org','191.166.188.251','2004-05-01 23:21:42'), - ('green', 86,'Jason','jlee2d@jimdo.com','193.92.16.182','1973-01-08 09:05:39'), - ('green', 87,'Diana','drichards2e@istockphoto.com','19.130.175.245','1994-10-05 22:50:49'), - ('green', 88,'Andrea','awelch2f@abc.net.au','94.155.233.96','2002-04-26 08:41:44'), - ('green', 89,'Louis','lwagner2g@miitbeian.gov.cn','26.217.34.111','2003-08-25 07:56:39'), - ('green', 90,'Jane','jsims2h@seesaa.net','43.4.220.135','1987-03-20 20:39:04'), - ('green', 91,'Larry','lgrant2i@si.edu','97.126.79.34','2000-09-07 20:26:19'), - ('green', 92,'Louis','ldean2j@prnewswire.com','37.148.40.127','2011-09-16 20:12:14'), - ('green', 93,'Jennifer','jcampbell2k@xing.com','38.106.254.142','1988-07-15 05:06:49'), - ('green', 94,'Wayne','wcunningham2l@google.com.hk','223.28.26.187','2009-12-15 06:16:54'), - ('green', 95,'Lori','lstevens2m@icq.com','181.250.181.58','1984-10-28 03:29:19'), - ('green', 96,'Judy','jsimpson2n@marriott.com','180.121.239.219','1986-02-07 15:18:10'), - ('green', 97,'Phillip','phoward2o@usa.gov','255.247.0.175','2002-12-26 08:44:45'), - ('green', 98,'Gloria','gwalker2p@usa.gov','156.140.7.128','1997-10-04 07:58:58'), - ('green', 99,'Paul','pjohnson2q@umn.edu','183.59.198.197','1991-11-14 12:33:55'), - ('green', 100,'Frank','fgreene2r@blogspot.com','150.143.68.121','2010-06-12 23:55:39'); diff --git a/tests/functional/schema_tests/fixtures.py b/tests/functional/schema_tests/fixtures.py deleted file mode 100644 index 51ae067bd..000000000 --- a/tests/functional/schema_tests/fixtures.py +++ /dev/null @@ -1,1275 +0,0 @@ -wrong_specification_block__schema_yml = """ -version: 2 -models: - - name: some_seed - description: "This is my seed under a model" -""" - -test_context_where_subq_models__schema_yml = """ -version: 2 - -models: - - name: model_a - data_tests: - - self_referential - -""" - -test_context_where_subq_models__model_a_sql = """ -select 1 as fun - -""" - -test_utils__dbt_project_yml = """ -name: 'test_utils' -version: '1.0' -config-version: 2 - -profile: 'default' - -macro-paths: ["macros"] - - -""" - -test_utils__macros__current_timestamp_sql = """ -{% macro current_timestamp() -%} - {{ return(adapter.dispatch('current_timestamp', 'test_utils')()) }} -{%- endmacro %} - -{% macro default__current_timestamp() -%} - now() -{%- endmacro %} - -""" - -test_utils__macros__custom_test_sql = """ -{% macro test_dispatch(model) -%} - {{ return(adapter.dispatch('test_dispatch', macro_namespace = 'test_utils')()) }} -{%- endmacro %} - -{% macro default__test_dispatch(model) %} - select {{ adapter.dispatch('current_timestamp', macro_namespace = 'test_utils')() }} -{% endmacro %} - -""" - -local_dependency__dbt_project_yml = """ -name: 'local_dep' -version: '1.0' -config-version: 2 - -profile: 'default' - -macro-paths: ["macros"] - -""" - -local_dependency__macros__equality_sql = """ -{#-- taken from dbt-utils --#} -{% test equality(model, compare_model, compare_columns=None) %} - {{ return(adapter.dispatch('test_equality')(model, compare_model, compare_columns)) }} -{% endtest %} - -{% macro default__test_equality(model, compare_model, compare_columns=None) %} - -{% set set_diff %} - count(*) + abs( - sum(case when which_diff = 'a_minus_b' then 1 else 0 end) - - sum(case when which_diff = 'b_minus_a' then 1 else 0 end) - ) -{% endset %} - -{#-- Needs to be set at parse time, before we return '' below --#} -{{ config(fail_calc = set_diff) }} - -{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #} -{%- if not execute -%} - {{ return('') }} -{% endif %} --- setup -{%- do dbt_utils._is_relation(model, 'test_equality') -%} -{#- -If the compare_cols arg is provided, we can run this test without querying the -information schema — this allows the model to be an ephemeral model --#} - -{%- if not compare_columns -%} - {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%} - {%- set compare_columns = adapter.get_columns_in_relation(model) | map(attribute='quoted') -%} -{%- endif -%} - -{% set compare_cols_csv = compare_columns | join(', ') %} - -with a as ( - select * from {{ model }} -), -b as ( - select * from {{ compare_model }} -), -a_minus_b as ( - select {{compare_cols_csv}} from a - {{ dbt_utils.except() }} - select {{compare_cols_csv}} from b -), -b_minus_a as ( - select {{compare_cols_csv}} from b - {{ dbt_utils.except() }} - select {{compare_cols_csv}} from a -), - -unioned as ( - - select 'a_minus_b' as which_diff, * from a_minus_b - union all - select 'b_minus_a' as which_diff, * from b_minus_a - -) - -select * from unioned - -{% endmacro %} - -""" - -case_sensitive_models__schema_yml = """ -version: 2 - -models: - - name: lowercase - columns: - - name: id - quote: true - data_tests: - - unique - - name: uppercase - columns: - - name: id - quote: true - data_tests: - - unique - -""" - -case_sensitive_models__uppercase_SQL = """ -select 1 as id - -""" - -case_sensitive_models__lowercase_sql = """ -select 1 as id - -""" - -test_context_macros__my_test_sql = """ -{% macro test_call_pkg_macro(model) %} - select {{ adapter.dispatch('current_timestamp', macro_namespace = 'local_utils')() }} -{% endmacro %} - -""" - -test_context_macros__test_my_datediff_sql = """ -{% macro test_my_datediff(model) %} - select {{ local_utils.datediff() }} -{% endmacro %} - -""" - -test_context_macros__custom_schema_tests_sql = """ -{% test type_one(model) %} - - select * from ( - - select * from {{ model }} - union all - select * from {{ ref('model_b') }} - - ) as Foo - -{% endtest %} - -{% test type_two(model) %} - - {{ config(severity = "WARN") }} - - select * from {{ model }} - -{% endtest %} - -""" - -test_context_models_namespaced__schema_yml = """ - -version: 2 - -models: - - name: model_a - data_tests: - - type_one - - type_two - - name: model_c - data_tests: - - call_pkg_macro - - test_utils.dispatch - -""" - -test_context_models_namespaced__model_c_sql = """ -select 1 as fun - -""" - -test_context_models_namespaced__model_b_sql = """ -select 1 as notfun - -""" - -test_context_models_namespaced__model_a_sql = """ -select 1 as fun - -""" - -macros_v2__override_get_test_macros_fail__get_test_sql_sql = """ -{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} - select - {{ fail_calc }} as failures, - case when {{ fail_calc }} {{ warn_if }} then 'x' else 'y' end as should_warn, - case when {{ fail_calc }} {{ error_if }} then 'x' else 'y' end as should_error - from ( - {{ main_sql }} - {{ "limit " ~ limit if limit != none }} - ) dbt_internal_test -{% endmacro %} -""" - -macros_v2__macros__tests_sql = """ -{% test every_value_is_blue(model, column_name) %} - - select * - from {{ model }} - where {{ column_name }} != 'blue' - -{% endtest %} - - -{% test rejected_values(model, column_name, values) %} - - select * - from {{ model }} - where {{ column_name }} in ( - {% for value in values %} - '{{ value }}' {% if not loop.last %} , {% endif %} - {% endfor %} - ) - -{% endtest %} - - -{% test equivalent(model, value) %} - {% set expected = 'foo-bar' %} - {% set eq = 1 if value == expected else 0 %} - {% set validation_message -%} - 'got "{{ value }}", expected "{{ expected }}"' - {%- endset %} - {% if eq == 0 and execute %} - {{ log(validation_message, info=True) }} - {% endif %} - - select {{ validation_message }} as validation_error - where {{ eq }} = 0 -{% endtest %} - - -""" - -macros_v2__override_get_test_macros__get_test_sql_sql = """ -{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} - select - {{ fail_calc }} as failures, - case when {{ fail_calc }} {{ warn_if }} then 1 else 0 end as should_warn, - case when {{ fail_calc }} {{ error_if }} then 1 else 0 end as should_error - from ( - {{ main_sql }} - {{ "limit " ~ limit if limit != none }} - ) dbt_internal_test -{%- endmacro %} -""" - -macros_v2__custom_configs__test_sql = """ -{% test where(model, column_name) %} - {{ config(where = "1 = 0") }} - select * from {{ model }} -{% endtest %} - -{% test error_if(model, column_name) %} - {{ config(error_if = "<= 0", warn_if = "<= 0") }} - select * from {{ model }} -{% endtest %} - - -{% test warn_if(model, column_name) %} - {{ config(warn_if = "<= 0", severity = "WARN") }} - select * from {{ model }} -{% endtest %} - -{% test limit(model, column_name) %} - {{ config(limit = 0) }} - select * from {{ model }} -{% endtest %} - -{% test fail_calc(model, column_name) %} - {{ config(fail_calc = "count(*) - count(*)") }} - select * from {{ model }} -{% endtest %} - -""" - -test_context_macros_namespaced__my_test_sql = """ -{% macro test_call_pkg_macro(model) %} - select {{ test_utils.current_timestamp() }} -{% endmacro %} - -""" - -test_context_macros_namespaced__custom_schema_tests_sql = """ -{% test type_one(model) %} - - select * from ( - - select * from {{ model }} - union all - select * from {{ ref('model_b') }} - - ) as Foo - -{% endtest %} - -{% test type_two(model) %} - - {{ config(severity = "WARN") }} - - select * from {{ model }} - -{% endtest %} - -""" - -seeds__some_seed_csv = """ -col_int,col_str -1,hello -2,goodbye -""" - -test_context_models__schema_yml = """ - -version: 2 - -models: - - name: model_a - data_tests: - - type_one - - type_two - - name: model_c - data_tests: - - call_pkg_macro - - local_utils.dispatch - - my_datediff - -""" - -test_context_models__model_c_sql = """ -select 1 as fun - -""" - -test_context_models__model_b_sql = """ -select 1 as notfun - -""" - -test_context_models__model_a_sql = """ -select 1 as fun - -""" - -name_collision__schema_yml = """ -version: 2 -models: -- name: base - columns: - - name: extension_id - data_tests: - - not_null -- name: base_extension - columns: - - name: id - data_tests: - - not_null - -""" - -name_collision__base_sql = """ -SELECT 'hello_world' AS extension_id -""" - -name_collision__base_extension_sql = """ -SELECT 'NOT_NULL' AS id -""" - - -dupe_generic_tests_collide__schema_yml = """ -version: 2 -models: -- name: model_a - columns: - - name: id - data_tests: - - not_null: - config: - where: "1=1" - - not_null: - config: - where: "1=2" - -""" - -dupe_generic_tests_collide__model_a = """ -SELECT 'NOT_NULL' AS id -""" - - -custom_generic_test_config_custom_macro__schema_yml = """ -version: 2 -models: -- name: model_a - columns: - - name: id - data_tests: - - not_null: - config: - where: "id = (select id from {{ ref('model_a') }} limit 1)" - -""" - -custom_generic_test_config_custom_macro__model_a = """ -SELECT 1 AS id -""" - - -custom_generic_test_names__schema_yml = """ -version: 2 -models: -- name: model_a - columns: - - name: id - data_tests: - - not_null: - name: not_null_where_1_equals_1 - config: - where: "1=1" - - not_null: - name: not_null_where_1_equals_2 - config: - where: "1=2" - -""" - -custom_generic_test_names__model_a = """ -SELECT 'NOT_NULL' AS id -""" - -custom_generic_test_names_alt_format__schema_yml = """ -version: 2 -models: -- name: model_a - columns: - - name: id - data_tests: - - name: not_null_where_1_equals_1 - test_name: not_null - config: - where: "1=1" - - name: not_null_where_1_equals_2 - test_name: not_null - config: - where: "1=2" - -""" - -custom_generic_test_names_alt_format__model_a = """ -SELECT 'NOT_NULL' AS id -""" - - -test_context_where_subq_macros__custom_generic_test_sql = """ -/*{# This test will fail if get_where_subquery() is missing from TestContext + TestMacroNamespace #}*/ - -{% test self_referential(model) %} - - {%- set relation = api.Relation.create(schema=model.schema, identifier=model.table) -%} - {%- set columns = adapter.get_columns_in_relation(relation) -%} - {%- set columns_csv = columns | map(attribute='name') | list | join(', ') -%} - - select {{ columns_csv }} from {{ model }} - limit 0 - -{% endtest %} - -""" - -invalid_schema_models__schema_yml = """ -version: 2 - -models: - name: model - columns: - - name: Id - quote: true - data_tests: - - unique - - not_null - -""" - -invalid_schema_models__model_sql = """ -select 1 as "Id" - -""" - -all_quotes_schema__schema_yml = """# models/schema.yml -# only comments here, which should be okay! -# https://github.com/dbt-labs/dbt-core/issues/3568""" - -models_v2__render_test_cli_arg_models__schema_yml = """ -version: 2 - -models: - - name: model - data_tests: - - equivalent: - value: "{{ var('myvar', 'baz') }}-bar" - -""" - -models_v2__render_test_cli_arg_models__model_sql = """ -select 1 as id - -""" - -models_v2__override_get_test_models__schema_yml = """ -version: 2 - -models: - - name: my_model_pass - description: "The table has 1 null values, and we're okay with that, until it's more than 1." - columns: - - name: id - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null: - error_if: '>1' - warn_if: '>1' - - - name: my_model_warning - description: "The table has 1 null values, and we're okay with that, but let us know" - columns: - - name: id - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null: - error_if: '>1' - - - name: my_model_failure - description: "The table has 2 null values, and we're not okay with that" - columns: - - name: id - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null: - error_if: '>1' - - -""" - -models_v2__override_get_test_models__my_model_warning_sql = """ -select * from {{ ref('my_model_pass') }} -""" - -models_v2__override_get_test_models__my_model_pass_sql = """ -select 1 as id -UNION ALL -select null as id -""" - -models_v2__override_get_test_models__my_model_failure_sql = """ -select * from {{ ref('my_model_pass') }} -UNION ALL -select null as id -""" - -models_v2__models__schema_yml = """ -version: 2 - -models: - - name: table_copy - description: "A copy of the table" - columns: - - name: id - description: "The ID" - data_tests: - - not_null - - unique - tags: - - table_id - - name: first_name - description: "The user's first name" - data_tests: - - not_null - tags: - - table_first_name - - name: ip_address - description: "The user's IP address" - data_tests: - - not_null - - name: updated_at - description: "The update time of the user" - data_tests: - - not_null - - name: email - description: "The user's email address" - data_tests: - - unique - - name: favorite_color - description: "The user's favorite color" - data_tests: - - accepted_values: { - values: ['blue', 'green'], - quote: true, - tags: table_copy_favorite_color # tags can be a single string - } - tags: - - table_favorite_color - - name: fav_number - description: "The user's favorite number" - data_tests: - - accepted_values: - values: [3.14159265] - quote: false - tags: # tags can be a list of strings - - favorite_number_is_pi - - - - name: table_summary - description: "The summary table" - columns: - - name: favorite_color_copy - description: "The favorite color" - data_tests: - - not_null - - unique - - accepted_values: { values: ['blue', 'green'] } - - relationships: { field: favorite_color, to: ref('table_copy') } - tags: - - table_favorite_color - - name: count - description: "The number of responses for this favorite color" - data_tests: - - not_null - -# all of these constraints will fail - - name: table_failure_copy - description: "The table copy that does not comply with the schema" - columns: - - name: id - description: "The user ID" - data_tests: - - not_null - - unique - tags: - - xfail - - name: favorite_color - description: "The user's favorite color" - data_tests: - - accepted_values: { values: ['blue', 'green'] } - tags: - - xfail - -# all of these constraints will fail - - name: table_failure_summary - description: "The table summary that does not comply with the schema" - columns: - - name: favorite_color - description: "The favorite color" - data_tests: - - accepted_values: { values: ['red'] } - - relationships: { field: favorite_color, to: ref('table_copy') } - tags: - - xfail - -# this table is disabled so these tests should be ignored - - name: table_disabled - description: "A disabled table" - columns: - - name: favorite_color - description: "The favorite color" - data_tests: - - accepted_values: { values: ['red'] } - - relationships: { field: favorite_color, to: ref('table_copy') } - -# all of these constraints will fail - - name: table_failure_null_relation - description: "A table with a null value where it should be a foreign key" - columns: - - name: id - description: "The user ID" - data_tests: - - relationships: { field: id, to: ref('table_failure_copy') } - tags: - - xfail - -""" - -models_v2__models__table_summary_sql = """ -{{ - config( - materialized='table' - ) -}} - -select favorite_color as favorite_color_copy, count(*) as count -from {{ ref('table_copy') }} -group by 1 - -""" - -models_v2__models__table_failure_summary_sql = """ -{{ - config( - materialized='table' - ) -}} - --- force a foreign key constraint failure here -select 'purple' as favorite_color, count(*) as count -from {{ ref('table_failure_copy') }} -group by 1 - -""" - -models_v2__models__table_disabled_sql = """ -{{ - config( - enabled=False - ) -}} - --- force a foreign key constraint failure here -select 'purple' as favorite_color, count(*) as count -from {{ ref('table_failure_copy') }} -group by 1 - -""" - -models_v2__models__table_failure_null_relation_sql = """ -{{ - config( - materialized='table' - ) -}} - --- force a foreign key constraint failure here -select 105 as id, count(*) as count -from {{ ref('table_failure_copy') }} -group by 1 - -""" - -models_v2__models__table_failure_copy_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select * from {{ this.schema }}.seed_failure - -""" - -models_v2__models__table_copy_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select * from {{ this.schema }}.seed - -""" - -models_v2__malformed__schema_yml = """ -version: 2 - -models: - # this whole model should fail and not run - - name: table_copy - description: "A copy of the table" - columns: - - name: id - description: "The ID" - data_tests: - - not_null - - unique - - name: favorite_color - data_tests: - # this is missing a "-" and is malformed - accepted_values: { values: ['blue', 'green'] } - - # this whole model should pass and run - - name: table_summary - description: "The summary table" - columns: - - name: favorite_color - description: "The favorite color" - data_tests: - - not_null - - unique - - accepted_values: { values: ['blue', 'green'] } - - relationships: { field: favorite_color, to: ref('table_copy') } - - name: count - description: "The number of responses for this favorite color" - data_tests: - - not_null - -""" - -models_v2__malformed__table_summary_sql = """ -{{ - config( - materialized='table' - ) -}} - -select favorite_color, count(*) as count -from {{ ref('table_copy') }} -group by 1 - -""" - -models_v2__malformed__table_copy_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select * from {{ this.schema }}.seed - -""" - -models_v2__override_get_test_models_fail__schema_yml = """ -version: 2 - -models: - - name: my_model - description: "The table has 1 null values, and we're not okay with that." - columns: - - name: id - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null - - - -""" - -models_v2__override_get_test_models_fail__my_model_sql = """ -select 1 as id -UNION ALL -select null as id -""" - -models_v2__custom_configs__schema_yml = """ -version: 2 - -models: - - name: table_copy - description: "A copy of the table" - # passes - data_tests: - - where - - error_if - - warn_if - - limit - - fail_calc - columns: - - name: id - data_tests: - # relationships with where - - relationships: - to: ref('table_copy') # itself - field: id - where: 1=1 - - name: table_copy_another_one - data_tests: - - where: # test override + weird quoting - config: - where: "\\"favorite_color\\" = 'red'" - - name: "table.copy.with.dots" - description: "A copy of the table with a gross name" - # passes, see https://github.com/dbt-labs/dbt-core/issues/3857 - data_tests: - - where - -""" - -models_v2__custom_configs__table_copy_another_one_sql = """ -select * from {{ ref('table_copy') }} - -""" - -models_v2__custom_configs__table_copy_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select * from {{ this.schema }}.seed - -""" - -models_v2__custom_configs__table_copy_with_dots_sql = """ -select * from {{ ref('table_copy') }} - -""" - -models_v2__render_test_configured_arg_models__schema_yml = """ -version: 2 - -models: - - name: model - data_tests: - - equivalent: - value: "{{ var('myvar', 'baz') }}-bar" - -""" - -models_v2__render_test_configured_arg_models__model_sql = """ -select 1 as id - -""" - -models_v2__custom__schema_yml = """ -version: 2 - -models: - - name: table_copy - description: "A copy of the table" - columns: - - name: email - data_tests: - - not_null - - name: id - description: "The ID" - data_tests: - - unique - - name: favorite_color - data_tests: - - every_value_is_blue - - rejected_values: { values: ['orange', 'purple'] } - # passes - data_tests: - - local_dep.equality: { compare_model: ref('table_copy') } - -""" - -models_v2__custom__table_copy_sql = """ - -{{ - config( - materialized='table' - ) -}} - -select * from {{ this.schema }}.seed - -""" - -models_v2__limit_null__schema_yml = """ -version: 2 - -models: - - name: table_limit_null - description: "The table has 1 null values, and we're okay with that, until it's more than 1." - columns: - - name: favorite_color_full_list - description: "The favorite color" - - name: count - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null: - error_if: '>1' - warn_if: '>1' - - - name: table_warning_limit_null - description: "The table has 1 null value, and we're okay with 1, but want to know of any." - columns: - - name: favorite_color_full_list - description: "The favorite color" - - name: count - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null: - error_if: '>1' - - - name: table_failure_limit_null - description: "The table has some 2 null values, and that's not ok. Warn and error." - columns: - - name: favorite_color_full_list - description: "The favorite color" - - name: count - description: "The number of responses for this favorite color - purple will be null" - data_tests: - - not_null: - error_if: '>1' - -""" - -models_v2__limit_null__table_warning_limit_null_sql = """ -{{ - config( - materialized='table' - ) -}} - -select * from {{ref('table_limit_null')}} -""" - -models_v2__limit_null__table_limit_null_sql = """ -{{ - config( - materialized='table' - ) -}} - -select favorite_color as favorite_color_full_list, count(*) as count -from {{ this.schema }}.seed -group by 1 - -UNION ALL - -select 'purple' as favorite_color_full_list, null as count -""" - -models_v2__limit_null__table_failure_limit_null_sql = """ -{{ - config( - materialized='table' - ) -}} - -select * from {{ref('table_limit_null')}} - -UNION ALL - -select 'magenta' as favorite_color_full_list, null as count -""" - -local_utils__dbt_project_yml = """ -name: 'local_utils' -version: '1.0' -config-version: 2 - -profile: 'default' - -macro-paths: ["macros"] - - -""" - -local_utils__macros__datediff_sql = """ -{% macro datediff(first_date, second_date, datepart) %} - {{ return(adapter.dispatch('datediff', 'local_utils')(first_date, second_date, datepart)) }} -{% endmacro %} - - -{% macro default__datediff(first_date, second_date, datepart) %} - - datediff( - {{ datepart }}, - {{ first_date }}, - {{ second_date }} - ) - -{% endmacro %} - - -{% macro postgres__datediff(first_date, second_date, datepart) %} - - {% if datepart == 'year' %} - (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date)) - {% elif datepart == 'quarter' %} - ({{ adapter.dispatch('datediff', 'local_utils')(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date)) - {% else %} - ( 1000 ) - {% endif %} - -{% endmacro %} - - -""" - -local_utils__macros__current_timestamp_sql = """ -{% macro current_timestamp() -%} - {{ return(adapter.dispatch('current_timestamp')) }} -{%- endmacro %} - -{% macro default__current_timestamp() -%} - now() -{%- endmacro %} - -""" - -local_utils__macros__custom_test_sql = """ -{% macro test_dispatch(model) -%} - {{ return(adapter.dispatch('test_dispatch', macro_namespace = 'local_utils')()) }} -{%- endmacro %} - -{% macro default__test_dispatch(model) %} - select {{ adapter.dispatch('current_timestamp', macro_namespace = 'local_utils')() }} -{% endmacro %} - -""" - -ephemeral__schema_yml = """ - -version: 2 -models: - - name: ephemeral - columns: - - name: id - data_tests: - - unique - -""" - -ephemeral__ephemeral_sql = """ - -{{ config(materialized='ephemeral') }} - -select 1 as id - -""" - -quote_required_models__schema_yml = """ -version: 2 - -models: - - name: model - columns: - - name: Id - quote: true - data_tests: - - unique - - not_null - - name: model_again - quote_columns: true - columns: - - name: Id - data_tests: - - unique - - not_null - - name: model_noquote - quote_columns: true - columns: - - name: Id - quote: false - data_tests: - - unique - - not_null - -sources: - # this should result in column quoting = true - - name: my_source - schema: "{{ target.schema }}" - quoting: - column: true - tables: - - name: model - quoting: - column: false - columns: - - name: Id - quote: true - data_tests: - - unique - - name: my_source_2 - schema: "{{ target.schema }}" - quoting: - column: false - tables: - # this should result in column quoting = true - - name: model - quoting: - column: true - columns: - - name: Id - data_tests: - - unique - # this should result in column quoting = false - - name: model_noquote - columns: - - name: Id - data_tests: - - unique - - -""" - -quote_required_models__model_again_sql = """ -select 1 as "Id" - -""" - -quote_required_models__model_noquote_sql = """ -select 1 as id - -""" - -quote_required_models__model_sql = """ -select 1 as "Id" - -""" - -alt_local_utils__macros__type_timestamp_sql = """ -{%- macro type_timestamp() -%} - {{ return(adapter.dispatch('type_timestamp', 'local_utils')()) }} -{%- endmacro -%} - -{% macro default__type_timestamp() %} - {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }} -{% endmacro %} -""" - -macro_resolution_order_macros__my_custom_test_sql = """ -{% test my_custom_test(model) %} - select cast(current_timestamp as {{ dbt.type_timestamp() }}) - limit 0 -{% endtest %} -""" - -macro_resolution_order_models__my_model_sql = """ -select 1 as id -""" - -macro_resolution_order_models__config_yml = """ -version: 2 -models: - - name: my_model - data_tests: - - my_custom_test -""" diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py deleted file mode 100644 index 68158efd7..000000000 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ /dev/null @@ -1,1110 +0,0 @@ -import os -import re - -from dbt.contracts.results import TestStatus -from dbt.exceptions import ParsingError -from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import run_dbt, write_file -from dbt_common.exceptions import CompilationError -import pytest - -from tests.functional.schema_tests.fixtures import ( - alt_local_utils__macros__type_timestamp_sql, - all_quotes_schema__schema_yml, - case_sensitive_models__lowercase_sql, - case_sensitive_models__schema_yml, - case_sensitive_models__uppercase_SQL, - custom_generic_test_config_custom_macro__model_a, - custom_generic_test_config_custom_macro__schema_yml, - custom_generic_test_names__model_a, - custom_generic_test_names__schema_yml, - custom_generic_test_names_alt_format__model_a, - custom_generic_test_names_alt_format__schema_yml, - ephemeral__ephemeral_sql, - ephemeral__schema_yml, - invalid_schema_models__model_sql, - invalid_schema_models__schema_yml, - local_dependency__dbt_project_yml, - local_dependency__macros__equality_sql, - local_utils__dbt_project_yml, - local_utils__macros__current_timestamp_sql, - local_utils__macros__custom_test_sql, - local_utils__macros__datediff_sql, - macro_resolution_order_models__config_yml, - macro_resolution_order_macros__my_custom_test_sql, - macro_resolution_order_models__my_model_sql, - macros_v2__custom_configs__test_sql, - macros_v2__macros__tests_sql, - macros_v2__override_get_test_macros__get_test_sql_sql, - macros_v2__override_get_test_macros_fail__get_test_sql_sql, - models_v2__custom__schema_yml, - models_v2__custom__table_copy_sql, - models_v2__custom_configs__schema_yml, - models_v2__custom_configs__table_copy_another_one_sql, - models_v2__custom_configs__table_copy_sql, - models_v2__custom_configs__table_copy_with_dots_sql, - models_v2__limit_null__schema_yml, - models_v2__limit_null__table_failure_limit_null_sql, - models_v2__limit_null__table_limit_null_sql, - models_v2__limit_null__table_warning_limit_null_sql, - models_v2__malformed__schema_yml, - models_v2__malformed__table_copy_sql, - models_v2__malformed__table_summary_sql, - models_v2__models__schema_yml, - models_v2__models__table_copy_sql, - models_v2__models__table_disabled_sql, - models_v2__models__table_failure_copy_sql, - models_v2__models__table_failure_null_relation_sql, - models_v2__models__table_failure_summary_sql, - models_v2__models__table_summary_sql, - models_v2__override_get_test_models__my_model_failure_sql, - models_v2__override_get_test_models__my_model_pass_sql, - models_v2__override_get_test_models__my_model_warning_sql, - models_v2__override_get_test_models__schema_yml, - models_v2__override_get_test_models_fail__my_model_sql, - models_v2__override_get_test_models_fail__schema_yml, - models_v2__render_test_cli_arg_models__model_sql, - models_v2__render_test_cli_arg_models__schema_yml, - models_v2__render_test_configured_arg_models__model_sql, - models_v2__render_test_configured_arg_models__schema_yml, - name_collision__base_sql, - name_collision__base_extension_sql, - name_collision__schema_yml, - quote_required_models__model_again_sql, - quote_required_models__model_noquote_sql, - quote_required_models__model_sql, - quote_required_models__schema_yml, - seeds__some_seed_csv, - test_context_where_subq_models__model_a_sql, - test_context_where_subq_models__schema_yml, - test_context_macros__custom_schema_tests_sql, - test_context_macros__my_test_sql, - test_context_macros__test_my_datediff_sql, - test_context_models__model_a_sql, - test_context_models__model_b_sql, - test_context_models__model_c_sql, - test_context_models__schema_yml, - test_context_macros_namespaced__custom_schema_tests_sql, - test_context_models_namespaced__model_a_sql, - test_context_models_namespaced__model_b_sql, - test_context_models_namespaced__model_c_sql, - test_context_macros_namespaced__my_test_sql, - test_context_models_namespaced__schema_yml, - test_context_where_subq_macros__custom_generic_test_sql, - test_utils__dbt_project_yml, - test_utils__macros__current_timestamp_sql, - test_utils__macros__custom_test_sql, - wrong_specification_block__schema_yml, -) - - -class TestSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) - project.run_sql_file(os.path.join(project.test_data_dir, "seed_failure.sql")) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__models__schema_yml, - "table_summary.sql": models_v2__models__table_summary_sql, - "table_failure_summary.sql": models_v2__models__table_failure_summary_sql, - "table_disabled.sql": models_v2__models__table_disabled_sql, - "table_failure_null_relation.sql": models_v2__models__table_failure_null_relation_sql, - "table_failure_copy.sql": models_v2__models__table_failure_copy_sql, - "table_copy.sql": models_v2__models__table_copy_sql, - } - - def assertTestFailed(self, result): - assert result.status == "fail" - assert not result.skipped - assert result.failures > 0, "test {} did not fail".format(result.node.name) - - def assertTestPassed(self, result): - assert result.status == "pass" - assert not result.skipped - assert result.failures == 0, "test {} failed".format(result.node.name) - - def test_schema_tests( - self, - project, - ): - results = run_dbt() - assert len(results) == 5 - test_results = run_dbt(["test"], expect_pass=False) - # If the disabled model's tests ran, there would be 20 of these. - assert len(test_results) == 19 - - for result in test_results: - # assert that all deliberately failing tests actually fail - if "failure" in result.node.name: - self.assertTestFailed(result) - # assert that actual tests pass - else: - self.assertTestPassed(result) - assert sum(x.failures for x in test_results) == 6 - - def test_schema_test_selection( - self, - project, - ): - results = run_dbt() - assert len(results) == 5 - test_results = run_dbt(["test", "--models", "tag:table_favorite_color"]) - # 1 in table_copy, 4 in table_summary - assert len(test_results) == 5 - for result in test_results: - self.assertTestPassed(result) - - test_results = run_dbt(["test", "--models", "tag:favorite_number_is_pi"]) - assert len(test_results) == 1 - self.assertTestPassed(test_results[0]) - - test_results = run_dbt(["test", "--models", "tag:table_copy_favorite_color"]) - assert len(test_results) == 1 - self.assertTestPassed(test_results[0]) - - def test_schema_test_exclude_failures( - self, - project, - ): - results = run_dbt() - assert len(results) == 5 - test_results = run_dbt(["test", "--exclude", "tag:xfail"]) - # If the failed + disabled model's tests ran, there would be 20 of these. - assert len(test_results) == 13 - for result in test_results: - self.assertTestPassed(result) - test_results = run_dbt(["test", "--models", "tag:xfail"], expect_pass=False) - assert len(test_results) == 6 - for result in test_results: - self.assertTestFailed(result) - - -class TestLimitedSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__limit_null__schema_yml, - "table_warning_limit_null.sql": models_v2__limit_null__table_warning_limit_null_sql, - "table_limit_null.sql": models_v2__limit_null__table_limit_null_sql, - "table_failure_limit_null.sql": models_v2__limit_null__table_failure_limit_null_sql, - } - - def assertTestFailed(self, result): - assert result.status == "fail" - assert not result.skipped - assert result.failures > 0, "test {} did not fail".format(result.node.name) - - def assertTestWarn(self, result): - assert result.status == "warn" - assert not result.skipped - assert result.failures > 0, "test {} passed without expected warning".format( - result.node.name - ) - - def assertTestPassed(self, result): - assert result.status == "pass" - assert not result.skipped - assert result.failures == 0, "test {} failed".format(result.node.name) - - def test_limit_schema_tests( - self, - project, - ): - results = run_dbt() - assert len(results) == 3 - test_results = run_dbt(["test"], expect_pass=False) - assert len(test_results) == 3 - - for result in test_results: - # assert that all deliberately failing tests actually fail - if "failure" in result.node.name: - self.assertTestFailed(result) - # assert that tests with warnings have them - elif "warning" in result.node.name: - self.assertTestWarn(result) - # assert that actual tests pass - else: - self.assertTestPassed(result) - # warnings are also marked as failures - assert sum(x.failures for x in test_results) == 3 - - -class TestDefaultBoolType: - # test with default True/False in get_test_sql macro - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__override_get_test_models__schema_yml, - "my_model_warning.sql": models_v2__override_get_test_models__my_model_warning_sql, - "my_model_pass.sql": models_v2__override_get_test_models__my_model_pass_sql, - "my_model_failure.sql": models_v2__override_get_test_models__my_model_failure_sql, - } - - def assertTestFailed(self, result): - assert result.status == "fail" - assert not result.skipped - assert result.failures > 0, "test {} did not fail".format(result.node.name) - - def assertTestWarn(self, result): - assert result.status == "warn" - assert not result.skipped - assert result.failures > 0, "test {} passed without expected warning".format( - result.node.name - ) - - def assertTestPassed(self, result): - assert result.status == "pass" - assert not result.skipped - assert result.failures == 0, "test {} failed".format(result.node.name) - - def test_limit_schema_tests( - self, - project, - ): - results = run_dbt() - assert len(results) == 3 - test_results = run_dbt(["test"], expect_pass=False) - assert len(test_results) == 3 - - for result in test_results: - # assert that all deliberately failing tests actually fail - if "failure" in result.node.name: - self.assertTestFailed(result) - # assert that tests with warnings have them - elif "warning" in result.node.name: - self.assertTestWarn(result) - # assert that actual tests pass - else: - self.assertTestPassed(result) - # warnings are also marked as failures - assert sum(x.failures for x in test_results) == 3 - - -class TestOtherBoolType: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - macros_v2_file = { - "override_get_test_macros": { - "get_test_sql.sql": macros_v2__override_get_test_macros__get_test_sql_sql - }, - } - write_project_files(project_root, "macros-v2", macros_v2_file) - - # test with expected 0/1 in custom get_test_sql macro - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__override_get_test_models__schema_yml, - "my_model_warning.sql": models_v2__override_get_test_models__my_model_warning_sql, - "my_model_pass.sql": models_v2__override_get_test_models__my_model_pass_sql, - "my_model_failure.sql": models_v2__override_get_test_models__my_model_failure_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["macros-v2/override_get_test_macros"], - } - - def assertTestFailed(self, result): - assert result.status == "fail" - assert not result.skipped - assert result.failures > 0, "test {} did not fail".format(result.node.name) - - def assertTestWarn(self, result): - assert result.status == "warn" - assert not result.skipped - assert result.failures > 0, "test {} passed without expected warning".format( - result.node.name - ) - - def assertTestPassed(self, result): - assert result.status == "pass" - assert not result.skipped - assert result.failures == 0, "test {} failed".format(result.node.name) - - def test_limit_schema_tests( - self, - project, - ): - results = run_dbt() - assert len(results) == 3 - test_results = run_dbt(["test"], expect_pass=False) - assert len(test_results) == 3 - - for result in test_results: - # assert that all deliberately failing tests actually fail - if "failure" in result.node.name: - self.assertTestFailed(result) - # assert that tests with warnings have them - elif "warning" in result.node.name: - self.assertTestWarn(result) - # assert that actual tests pass - else: - self.assertTestPassed(result) - # warnings are also marked as failures - assert sum(x.failures for x in test_results) == 3 - - -class TestNonBoolType: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - macros_v2_file = { - "override_get_test_macros_fail": { - "get_test_sql.sql": macros_v2__override_get_test_macros_fail__get_test_sql_sql - }, - } - write_project_files(project_root, "macros-v2", macros_v2_file) - - # test with invalid 'x'/'y' in custom get_test_sql macro - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__override_get_test_models_fail__schema_yml, - "my_model.sql": models_v2__override_get_test_models_fail__my_model_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["macros-v2/override_get_test_macros_fail"], - } - - def test_limit_schema_tests( - self, - project, - ): - results = run_dbt() - assert len(results) == 1 - run_result = run_dbt(["test"], expect_pass=False) - results = run_result.results - assert len(results) == 1 - assert results[0].status == TestStatus.Error - assert re.search(r"'get_test_sql' returns 'x'", results[0].message) - - -class TestMalformedSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project): - project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__malformed__schema_yml, - "table_summary.sql": models_v2__malformed__table_summary_sql, - "table_copy.sql": models_v2__malformed__table_copy_sql, - } - - def test_malformed_schema_will_break_run( - self, - project, - ): - with pytest.raises(ParsingError): - run_dbt() - - -class TestCustomConfigSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project, project_root): - project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) - - macros_v2_file = {"custom-configs": {"test.sql": macros_v2__custom_configs__test_sql}} - write_project_files(project_root, "macros-v2", macros_v2_file) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__custom_configs__schema_yml, - "table_copy_another_one.sql": models_v2__custom_configs__table_copy_another_one_sql, - "table_copy.sql": models_v2__custom_configs__table_copy_sql, - "table.copy.with.dots.sql": models_v2__custom_configs__table_copy_with_dots_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["macros-v2/custom-configs"], - } - - def test_config( - self, - project, - ): - """Test that tests use configs properly. All tests for - this project will fail, configs are set to make test pass.""" - results = run_dbt(["test"], expect_pass=False) - - assert len(results) == 8 - for result in results: - assert not result.skipped - - -class TestHooksInTests: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": ephemeral__schema_yml, - "ephemeral.sql": ephemeral__ephemeral_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "on-run-start": ["{{ log('hooks called in tests -- good!') if execute }}"], - "on-run-end": ["{{ log('hooks called in tests -- good!') if execute }}"], - } - - def test_hooks_do_run_for_tests( - self, - project, - ): - # This passes now that hooks run, a behavior we changed in v1.0 - results = run_dbt(["test", "--model", "ephemeral"]) - assert len(results) == 3 - for result in results: - assert result.status in ("pass", "success") - assert not result.skipped - assert result.failures == 0, "test {} failed".format(result.node.name) - - -class TestHooksForWhich: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": ephemeral__schema_yml, - "ephemeral.sql": ephemeral__ephemeral_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "on-run-start": [ - "{{exceptions.raise_compiler_error('hooks called in tests -- error') if (execute and flags.WHICH != 'test') }}" - ], - "on-run-end": [ - "{{exceptions.raise_compiler_error('hooks called in tests -- error') if (execute and flags.WHICH != 'test') }}" - ], - } - - def test_these_hooks_dont_run_for_tests( - self, - project, - ): - # This would fail if the hooks ran - results = run_dbt(["test", "--model", "ephemeral"]) - assert len(results) == 3 - for result in results: - assert result.status in ("pass", "success") - assert not result.skipped - assert result.failures == 0, "test {} failed".format(result.node.name) - - -class TestCustomSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project, project_root, dbt_integration_project): # noqa: F811 - write_project_files(project_root, "dbt_integration_project", dbt_integration_project) - project.run_sql_file(os.path.join(project.test_data_dir, "seed.sql")) - - local_dependency_files = { - "dbt_project.yml": local_dependency__dbt_project_yml, - "macros": {"equality.sql": local_dependency__macros__equality_sql}, - } - write_project_files(project_root, "local_dependency", local_dependency_files) - - macros_v2_file = { - "macros": {"tests.sql": macros_v2__macros__tests_sql}, - } - write_project_files(project_root, "macros-v2", macros_v2_file) - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - { - "local": "./local_dependency", - }, - { - "local": "./dbt_integration_project", - }, - ] - } - - @pytest.fixture(scope="class") - def project_config_update(self): - # dbt-utils contains a schema test (equality) - # dbt-integration-project contains a schema.yml file - # both should work! - return { - "config-version": 2, - "macro-paths": ["macros-v2/macros"], - } - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__custom__schema_yml, - "table_copy.sql": models_v2__custom__table_copy_sql, - } - - def test_schema_tests( - self, - project, - ): - run_dbt(["deps"]) - results = run_dbt() - assert len(results) == 4 - - test_results = run_dbt(["test"], expect_pass=False) - assert len(test_results) == 6 - - expected_failures = [ - "not_null_table_copy_email", - "every_value_is_blue_table_copy_favorite_color", - ] - - for result in test_results: - if result.status == "fail": - assert result.node.name in expected_failures - - -class TestQuotedSchemaTestColumns: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": quote_required_models__schema_yml, - "model_again.sql": quote_required_models__model_again_sql, - "model_noquote.sql": quote_required_models__model_noquote_sql, - "model.sql": quote_required_models__model_sql, - } - - def test_quote_required_column( - self, - project, - ): - results = run_dbt() - assert len(results) == 3 - results = run_dbt(["test", "-m", "model"]) - assert len(results) == 2 - results = run_dbt(["test", "-m", "model_again"]) - assert len(results) == 2 - results = run_dbt(["test", "-m", "model_noquote"]) - assert len(results) == 2 - results = run_dbt(["test", "-m", "source:my_source"]) - assert len(results) == 1 - results = run_dbt(["test", "-m", "source:my_source_2"]) - assert len(results) == 2 - - -class TestCliVarsSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - macros_v2_file = { - "macros": {"tests.sql": macros_v2__macros__tests_sql}, - } - write_project_files(project_root, "macros-v2", macros_v2_file) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__render_test_cli_arg_models__schema_yml, - "model.sql": models_v2__render_test_cli_arg_models__model_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["macros-v2/macros"], - } - - def test_argument_rendering( - self, - project, - ): - results = run_dbt() - assert len(results) == 1 - results = run_dbt(["test", "--vars", "{myvar: foo}"]) - assert len(results) == 1 - run_dbt(["test"], expect_pass=False) - - -class TestConfiguredVarsSchemaTests: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - macros_v2_file = { - "macros": {"tests.sql": macros_v2__macros__tests_sql}, - } - write_project_files(project_root, "macros-v2", macros_v2_file) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": models_v2__render_test_configured_arg_models__schema_yml, - "model.sql": models_v2__render_test_configured_arg_models__model_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["macros-v2/macros"], - "vars": {"myvar": "foo"}, - } - - def test_argument_rendering( - self, - project, - ): - results = run_dbt() - assert len(results) == 1 - results = run_dbt(["test"]) - assert len(results) == 1 - - -class TestSchemaCaseInsensitive: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": case_sensitive_models__schema_yml, - "lowercase.sql": case_sensitive_models__lowercase_sql, - } - - @pytest.fixture(scope="class", autouse=True) - def setUP(self, project): - # Create the uppercase SQL file - model_dir = os.path.join(project.project_root, "models") - write_file(case_sensitive_models__uppercase_SQL, model_dir, "uppercase.SQL") - - def test_schema_lowercase_sql( - self, - project, - ): - results = run_dbt() - assert len(results) == 2 - results = run_dbt(["test", "-m", "lowercase"]) - assert len(results) == 1 - - def test_schema_uppercase_sql( - self, - project, - ): - results = run_dbt() - assert len(results) == 2 - results = run_dbt(["test", "-m", "uppercase"]) - assert len(results) == 1 - - -class TestSchemaTestContext: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - local_utils_files = { - "dbt_project.yml": local_utils__dbt_project_yml, - "macros": { - "datediff.sql": local_utils__macros__datediff_sql, - "current_timestamp.sql": local_utils__macros__current_timestamp_sql, - "custom_test.sql": local_utils__macros__custom_test_sql, - }, - } - write_project_files(project_root, "local_utils", local_utils_files) - - test_context_macros_files = { - "my_test.sql": test_context_macros__my_test_sql, - "test_my_datediff.sql": test_context_macros__test_my_datediff_sql, - "custom_schema_tests.sql": test_context_macros__custom_schema_tests_sql, - } - write_project_files(project_root, "test-context-macros", test_context_macros_files) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": test_context_models__schema_yml, - "model_c.sql": test_context_models__model_c_sql, - "model_b.sql": test_context_models__model_b_sql, - "model_a.sql": test_context_models__model_a_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["test-context-macros"], - "vars": {"local_utils_dispatch_list": ["local_utils"]}, - } - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "local_utils"}]} - - def test_test_context_tests(self, project): - # This test tests the the TestContext and TestMacroNamespace - # are working correctly - run_dbt(["deps"]) - results = run_dbt() - assert len(results) == 3 - - run_result = run_dbt(["test"], expect_pass=False) - results = run_result.results - results = sorted(results, key=lambda r: r.node.name) - assert len(results) == 5 - # call_pkg_macro_model_c_ - assert results[0].status == TestStatus.Fail - # dispatch_model_c_ - assert results[1].status == TestStatus.Fail - # my_datediff - assert re.search(r"1000", results[2].node.compiled_code) - # type_one_model_a_ - assert results[3].status == TestStatus.Fail - assert re.search(r"union all", results[3].node.compiled_code) - # type_two_model_a_ - assert results[4].status == TestStatus.Warn - assert results[4].node.config.severity == "WARN" - - -class TestSchemaTestContextWithMacroNamespace: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - test_utils_files = { - "dbt_project.yml": test_utils__dbt_project_yml, - "macros": { - "current_timestamp.sql": test_utils__macros__current_timestamp_sql, - "custom_test.sql": test_utils__macros__custom_test_sql, - }, - } - write_project_files(project_root, "test_utils", test_utils_files) - - local_utils_files = { - "dbt_project.yml": local_utils__dbt_project_yml, - "macros": { - "datediff.sql": local_utils__macros__datediff_sql, - "current_timestamp.sql": local_utils__macros__current_timestamp_sql, - "custom_test.sql": local_utils__macros__custom_test_sql, - }, - } - write_project_files(project_root, "local_utils", local_utils_files) - - test_context_macros_namespaced_file = { - "my_test.sql": test_context_macros_namespaced__my_test_sql, - "custom_schema_tests.sql": test_context_macros_namespaced__custom_schema_tests_sql, - } - write_project_files( - project_root, "test-context-macros-namespaced", test_context_macros_namespaced_file - ) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": test_context_models_namespaced__schema_yml, - "model_c.sql": test_context_models_namespaced__model_c_sql, - "model_b.sql": test_context_models_namespaced__model_b_sql, - "model_a.sql": test_context_models_namespaced__model_a_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["test-context-macros-namespaced"], - "dispatch": [ - { - "macro_namespace": "test_utils", - "search_order": ["local_utils", "test_utils"], - } - ], - } - - @pytest.fixture(scope="class") - def packages(self): - return { - "packages": [ - {"local": "test_utils"}, - {"local": "local_utils"}, - ] - } - - def test_test_context_with_macro_namespace( - self, - project, - ): - # This test tests the the TestContext and TestMacroNamespace - # are working correctly - run_dbt(["deps"]) - results = run_dbt() - assert len(results) == 3 - - run_result = run_dbt(["test"], expect_pass=False) - results = run_result.results - results = sorted(results, key=lambda r: r.node.name) - assert len(results) == 4 - # call_pkg_macro_model_c_ - assert results[0].status == TestStatus.Fail - # dispatch_model_c_ - assert results[1].status == TestStatus.Fail - # type_one_model_a_ - assert results[2].status == TestStatus.Fail - assert re.search(r"union all", results[2].node.compiled_code) - # type_two_model_a_ - assert results[3].status == TestStatus.Warn - assert results[3].node.config.severity == "WARN" - - -class TestSchemaTestNameCollision: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": name_collision__schema_yml, - "base.sql": name_collision__base_sql, - "base_extension.sql": name_collision__base_extension_sql, - } - - def test_collision_test_names_get_hash( - self, - project, - ): - """The models should produce unique IDs with a has appended""" - results = run_dbt() - test_results = run_dbt(["test"]) - - # both models and both tests run - assert len(results) == 2 - assert len(test_results) == 2 - - # both tests have the same unique id except for the hash - expected_unique_ids = [ - "test.test.not_null_base_extension_id.922d83a56c", - "test.test.not_null_base_extension_id.c8d18fe069", - ] - assert test_results[0].node.unique_id in expected_unique_ids - assert test_results[1].node.unique_id in expected_unique_ids - - -class TestGenericTestsConfigCustomMacros: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": custom_generic_test_config_custom_macro__schema_yml, - "model_a.sql": custom_generic_test_config_custom_macro__model_a, - } - - def test_generic_test_config_custom_macros( - self, - project, - ): - """This test has a reference to a custom macro its configs""" - with pytest.raises(CompilationError) as exc: - run_dbt() - assert "Invalid generic test configuration" in str(exc) - - -class TestGenericTestsCustomNames: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": custom_generic_test_names__schema_yml, - "model_a.sql": custom_generic_test_names__model_a, - } - - # users can define custom names for specific instances of generic tests - def test_generic_tests_with_custom_names( - self, - project, - ): - """These tests don't collide, since they have user-provided custom names""" - results = run_dbt() - test_results = run_dbt(["test"]) - - # model + both tests run - assert len(results) == 1 - assert len(test_results) == 2 - - # custom names propagate to the unique_id - expected_unique_ids = [ - "test.test.not_null_where_1_equals_1.7b96089006", - "test.test.not_null_where_1_equals_2.8ae586e17f", - ] - assert test_results[0].node.unique_id in expected_unique_ids - assert test_results[1].node.unique_id in expected_unique_ids - - -class TestGenericTestsCustomNamesAltFormat(TestGenericTestsCustomNames): - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": custom_generic_test_names_alt_format__schema_yml, - "model_a.sql": custom_generic_test_names_alt_format__model_a, - } - - # exactly as above, just alternative format for yaml definition - def test_collision_test_names_get_hash( - self, - project, - ): - """These tests don't collide, since they have user-provided custom names, - defined using an alternative format""" - super().test_generic_tests_with_custom_names(project) - - -class TestInvalidSchema: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": invalid_schema_models__schema_yml, - "model.sql": invalid_schema_models__model_sql, - } - - def test_invalid_schema_file( - self, - project, - ): - with pytest.raises(ParsingError) as exc: - run_dbt() - assert re.search(r"'models' is not a list", str(exc)) - - -class TestCommentedSchema: - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": all_quotes_schema__schema_yml, - "model.sql": invalid_schema_models__model_sql, - } - - def test_quoted_schema_file(self, project): - try: - # A schema file consisting entirely of quotes should not be a problem - run_dbt(["parse"]) - except TypeError: - assert ( - False - ), "`dbt parse` failed with a yaml file that is all comments with the same exception as 3568" - except Exception: - assert False, "`dbt parse` failed with a yaml file that is all comments" - - -class TestWrongSpecificationBlock: - @pytest.fixture(scope="class") - def models(self): - return {"schema.yml": wrong_specification_block__schema_yml} - - @pytest.fixture(scope="class") - def seeds(self): - return {"some_seed.csv": seeds__some_seed_csv} - - def test_wrong_specification_block( - self, - project, - ): - with pytest.warns(Warning): - results = run_dbt( - [ - "ls", - "-s", - "some_seed", - "--output", - "json", - "--output-keys", - "name", - "description", - ] - ) - - assert len(results) == 1 - assert results[0] == '{"name": "some_seed", "description": ""}' - - -class TestSchemaTestContextWhereSubq: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - test_context_where_subq_macros_file = { - "custom_generic_test.sql": test_context_where_subq_macros__custom_generic_test_sql - } - write_project_files( - project_root, "test-context-where-subq-macros", test_context_where_subq_macros_file - ) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": test_context_where_subq_models__schema_yml, - "model_a.sql": test_context_where_subq_models__model_a_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["test-context-where-subq-macros"], - } - - def test_test_context_tests( - self, - project, - ): - # This test tests that get_where_subquery() is included in TestContext + TestMacroNamespace, - # otherwise api.Relation.create() will return an error - results = run_dbt() - assert len(results) == 1 - - results = run_dbt(["test"]) - assert len(results) == 1 - - -class TestCustomSchemaTestMacroResolutionOrder: - @pytest.fixture(scope="class", autouse=True) - def setUp(self, project_root): - alt_local_utils_file = { - "dbt_project.yml": local_utils__dbt_project_yml, - "macros": { - "datediff.sql": alt_local_utils__macros__type_timestamp_sql, - }, - } - write_project_files(project_root, "alt_local_utils", alt_local_utils_file) - - macros_resolution_order_file = { - "my_custom_test.sql": macro_resolution_order_macros__my_custom_test_sql, - } - write_project_files( - project_root, "macro_resolution_order_macros", macros_resolution_order_file - ) - - @pytest.fixture(scope="class") - def models(self): - return { - "schema.yml": macro_resolution_order_models__config_yml, - "my_model.sql": macro_resolution_order_models__my_model_sql, - } - - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "macro-paths": ["macro_resolution_order_macros"], - } - - @pytest.fixture(scope="class") - def packages(self): - return {"packages": [{"local": "alt_local_utils"}]} - - def test_macro_resolution_test_namespace( - self, - project, - ): - # https://github.com/dbt-labs/dbt-core/issues/5720 - # Previously, macros called as 'dbt.some_macro' would not correctly - # resolve to 'some_macro' from the 'dbt' namespace during static analysis, - # if 'some_macro' also existed in an installed package, - # leading to the macro being missing in the TestNamespace - run_dbt(["deps"]) - run_dbt(["parse"]) From 4555e5dc953978569a3794e850fb93ca215f4ee9 Mon Sep 17 00:00:00 2001 From: Quigley Malcolm <QMalcolm@users.noreply.github.com> Date: Fri, 18 Oct 2024 05:53:01 -0700 Subject: [PATCH 105/114] Add test for ensuring bad incremental strategies get validated/caught (#160) --- tests/functional/adapter/test_basic.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/functional/adapter/test_basic.py b/tests/functional/adapter/test_basic.py index bf31a1232..d5a01300e 100644 --- a/tests/functional/adapter/test_basic.py +++ b/tests/functional/adapter/test_basic.py @@ -10,6 +10,7 @@ from dbt.tests.adapter.basic.test_incremental import ( BaseIncremental, BaseIncrementalNotSchemaChange, + BaseIncrementalBadStrategy, ) from dbt.tests.adapter.basic.test_singular_tests import BaseSingularTests from dbt.tests.adapter.basic.test_singular_tests_ephemeral import BaseSingularTestsEphemeral @@ -55,6 +56,10 @@ class TestBaseIncrementalNotSchemaChange(BaseIncrementalNotSchemaChange): pass +class TestBaseIncrementalBadStrategy(BaseIncrementalBadStrategy): + pass + + class TestSingularTests(BaseSingularTests): pass From ae48e67dae6c1b00cda37ee9bdc61d3330506638 Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Tue, 5 Nov 2024 10:42:55 -0800 Subject: [PATCH 106/114] move github runner from macos-12 to macos-14 (#171) --- .github/workflows/integration-tests.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 0e5f41273..304c139c0 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -147,7 +147,7 @@ jobs: strategy: fail-fast: false matrix: - platform: [ubuntu-22.04, macos-12] + platform: [ubuntu-22.04, macos-14] python-version: ["3.9", "3.12"] steps: - name: "Check out repository" @@ -158,6 +158,11 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: "Install postgresql on MacOS" + if: ${{ matrix.platform == 'macos-14' }} + run: | + brew install postgresql + - name: "Test psycopg2 name - default" run: .github/scripts/psycopg2-check.sh env: From 6ff90bb39946ff931bcb7f513d5646c06177619a Mon Sep 17 00:00:00 2001 From: GitHub Build Bot <buildbot@fishtownanalytics.com> Date: Mon, 2 Dec 2024 18:04:08 +0000 Subject: [PATCH 107/114] Generate changelog at .changes/1.9.0-rc1.md --- .changes/1.9.0-rc1.md | 14 ++++++++++++++ .../Breaking Changes-20241016-175527.yaml | 0 .../Features-20240927-133708.yaml | 0 CHANGELOG.md | 16 +++++++++++++++- 4 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 .changes/1.9.0-rc1.md rename .changes/{unreleased => 1.9.0}/Breaking Changes-20241016-175527.yaml (100%) rename .changes/{unreleased => 1.9.0}/Features-20240927-133708.yaml (100%) diff --git a/.changes/1.9.0-rc1.md b/.changes/1.9.0-rc1.md new file mode 100644 index 000000000..ec1a68cfd --- /dev/null +++ b/.changes/1.9.0-rc1.md @@ -0,0 +1,14 @@ +## dbt-postgres 1.9.0-rc1 - December 02, 2024 + +### Breaking Changes + +- Drop support for Python 3.8 ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) + +### Features + +- Enable setting current value of dbt_valid_to ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) + +### Contributors +- [@gshank](https://github.com/gshank) ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) +- [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) + diff --git a/.changes/unreleased/Breaking Changes-20241016-175527.yaml b/.changes/1.9.0/Breaking Changes-20241016-175527.yaml similarity index 100% rename from .changes/unreleased/Breaking Changes-20241016-175527.yaml rename to .changes/1.9.0/Breaking Changes-20241016-175527.yaml diff --git a/.changes/unreleased/Features-20240927-133708.yaml b/.changes/1.9.0/Features-20240927-133708.yaml similarity index 100% rename from .changes/unreleased/Features-20240927-133708.yaml rename to .changes/1.9.0/Features-20240927-133708.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index fc95b95f0..7b1d35595 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,21 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). +## dbt-postgres 1.9.0-rc1 - December 02, 2024 + +### Breaking Changes + +- Drop support for Python 3.8 ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) + +### Features + +- Enable setting current value of dbt_valid_to ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) + +### Contributors +- [@gshank](https://github.com/gshank) ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) +- [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) + + ## dbt-postgres 1.9.0-b1 - September 25, 2024 ### Features @@ -36,7 +51,6 @@ and is generated by [Changie](https://github.com/miniscruff/changie). - [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) - [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) - ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.8](https://github.com/dbt-labs/dbt-postgres/blob/1.8.latest/CHANGELOG.md) From a185fda859907700ae06566c0c401abcc460c3ae Mon Sep 17 00:00:00 2001 From: GitHub Build Bot <buildbot@fishtownanalytics.com> Date: Mon, 2 Dec 2024 18:05:32 +0000 Subject: [PATCH 108/114] Bump version to 1.9.0rc1 --- dbt/adapters/postgres/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index a4077fff2..c70591d8a 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.9.0b1" +version = "1.9.0rc1" From 2a245896a7236d250a9a5d73cc9ccddcafcdc708 Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Wed, 4 Dec 2024 09:05:49 -0800 Subject: [PATCH 109/114] update libpq-dev dependency to 13.18-0+deb11u1 (#177) --- .changes/1.9.0-rc1.md | 1 - docker/Dockerfile | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.changes/1.9.0-rc1.md b/.changes/1.9.0-rc1.md index ec1a68cfd..2b0694d77 100644 --- a/.changes/1.9.0-rc1.md +++ b/.changes/1.9.0-rc1.md @@ -11,4 +11,3 @@ ### Contributors - [@gshank](https://github.com/gshank) ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) - [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) - diff --git a/docker/Dockerfile b/docker/Dockerfile index cfbc81aed..c26143947 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -9,7 +9,7 @@ RUN apt-get update \ build-essential=12.9 \ ca-certificates=20210119 \ git=1:2.30.2-1+deb11u2 \ - libpq-dev=13.14-0+deb11u1 \ + libpq-dev=13.18-0+deb11u1 \ make=4.3-4.1 \ openssh-client=1:8.4p1-5+deb11u3 \ software-properties-common=0.96.20.2-2.1 \ From cc7189f9608576c0b61bd57137ea91332051912a Mon Sep 17 00:00:00 2001 From: GitHub Build Bot <buildbot@fishtownanalytics.com> Date: Mon, 9 Dec 2024 18:17:06 +0000 Subject: [PATCH 110/114] Generate changelog at .changes/1.9.0.md --- .changes/1.9.0-rc1.md | 13 ------------- .changes/{1.9.0-b1.md => 1.9.0.md} | 12 +++++++++--- .../Breaking Changes-20241016-175527.yaml | 6 ------ .changes/1.9.0/Features-20240430-185700.yaml | 6 ------ .changes/1.9.0/Features-20240501-151856.yaml | 6 ------ .changes/1.9.0/Features-20240731-210800.yaml | 6 ------ .changes/1.9.0/Features-20240903-160221.yaml | 6 ------ .changes/1.9.0/Features-20240911-141416.yaml | 6 ------ .changes/1.9.0/Features-20240927-133708.yaml | 6 ------ .changes/1.9.0/Fixes-20240514-193201.yaml | 6 ------ .changes/1.9.0/Fixes-20240605-202614.yaml | 7 ------- .changes/1.9.0/Fixes-20240626-163930.yaml | 6 ------ .../1.9.0/Under the Hood-20240716-172442.yaml | 6 ------ .../1.9.0/Under the Hood-20240731-075011.yaml | 6 ------ CHANGELOG.md | 19 +++++-------------- 15 files changed, 14 insertions(+), 103 deletions(-) delete mode 100644 .changes/1.9.0-rc1.md rename .changes/{1.9.0-b1.md => 1.9.0.md} (78%) delete mode 100644 .changes/1.9.0/Breaking Changes-20241016-175527.yaml delete mode 100644 .changes/1.9.0/Features-20240430-185700.yaml delete mode 100644 .changes/1.9.0/Features-20240501-151856.yaml delete mode 100644 .changes/1.9.0/Features-20240731-210800.yaml delete mode 100644 .changes/1.9.0/Features-20240903-160221.yaml delete mode 100644 .changes/1.9.0/Features-20240911-141416.yaml delete mode 100644 .changes/1.9.0/Features-20240927-133708.yaml delete mode 100644 .changes/1.9.0/Fixes-20240514-193201.yaml delete mode 100644 .changes/1.9.0/Fixes-20240605-202614.yaml delete mode 100644 .changes/1.9.0/Fixes-20240626-163930.yaml delete mode 100644 .changes/1.9.0/Under the Hood-20240716-172442.yaml delete mode 100644 .changes/1.9.0/Under the Hood-20240731-075011.yaml diff --git a/.changes/1.9.0-rc1.md b/.changes/1.9.0-rc1.md deleted file mode 100644 index 2b0694d77..000000000 --- a/.changes/1.9.0-rc1.md +++ /dev/null @@ -1,13 +0,0 @@ -## dbt-postgres 1.9.0-rc1 - December 02, 2024 - -### Breaking Changes - -- Drop support for Python 3.8 ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) - -### Features - -- Enable setting current value of dbt_valid_to ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) - -### Contributors -- [@gshank](https://github.com/gshank) ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) -- [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) diff --git a/.changes/1.9.0-b1.md b/.changes/1.9.0.md similarity index 78% rename from .changes/1.9.0-b1.md rename to .changes/1.9.0.md index 59bd21c56..ef3364771 100644 --- a/.changes/1.9.0-b1.md +++ b/.changes/1.9.0.md @@ -1,4 +1,8 @@ -## dbt-postgres 1.9.0-b1 - September 25, 2024 +## dbt-postgres 1.9.0 - December 09, 2024 + +### Breaking Changes + +- Drop support for Python 3.8 ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) ### Features @@ -7,6 +11,7 @@ - Add support for Python 3.12 ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17)) - Allow configuring snapshot column names ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) - Microbatch incremental strategy implementation: merge ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) +- Enable setting current value of dbt_valid_to ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) ### Fixes @@ -21,10 +26,11 @@ ### Contributors - [@dbeatty10](https://github.com/dbeatty10) ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76), [#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) -- [@gshank](https://github.com/gshank) ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) +- [@gshank](https://github.com/gshank) ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144), [#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) - [@leahwicz](https://github.com/leahwicz) ([#109](https://github.com/dbt-labs/dbt-postgres/issues/109)) - [@michelleark](https://github.com/michelleark) ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) -- [@mikealfare](https://github.com/mikealfare) ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17), [#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) +- [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161), [#17](https://github.com/dbt-labs/dbt-postgres/issues/17), [#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) - [@morsapaes](https://github.com/morsapaes) ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) - [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) - [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) + diff --git a/.changes/1.9.0/Breaking Changes-20241016-175527.yaml b/.changes/1.9.0/Breaking Changes-20241016-175527.yaml deleted file mode 100644 index d1e4df8fb..000000000 --- a/.changes/1.9.0/Breaking Changes-20241016-175527.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Breaking Changes -body: Drop support for Python 3.8 -time: 2024-10-16T17:55:27.844499-04:00 -custom: - Author: mikealfare - Issue: "161" diff --git a/.changes/1.9.0/Features-20240430-185700.yaml b/.changes/1.9.0/Features-20240430-185700.yaml deleted file mode 100644 index 638d10625..000000000 --- a/.changes/1.9.0/Features-20240430-185700.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add tests for cross-database `cast` macro -time: 2024-04-30T18:57:00.437045-06:00 -custom: - Author: dbeatty10 - Issue: "76" diff --git a/.changes/1.9.0/Features-20240501-151856.yaml b/.changes/1.9.0/Features-20240501-151856.yaml deleted file mode 100644 index 2dda71939..000000000 --- a/.changes/1.9.0/Features-20240501-151856.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Cross-database `date` macro -time: 2024-05-01T15:18:56.758715-06:00 -custom: - Author: dbeatty10 - Issue: 82 diff --git a/.changes/1.9.0/Features-20240731-210800.yaml b/.changes/1.9.0/Features-20240731-210800.yaml deleted file mode 100644 index b2fc1f0ad..000000000 --- a/.changes/1.9.0/Features-20240731-210800.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Add support for Python 3.12 -time: 2024-07-31T21:08:00.170999-04:00 -custom: - Author: mikealfare - Issue: "17" diff --git a/.changes/1.9.0/Features-20240903-160221.yaml b/.changes/1.9.0/Features-20240903-160221.yaml deleted file mode 100644 index 8409e2486..000000000 --- a/.changes/1.9.0/Features-20240903-160221.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Allow configuring snapshot column names -time: 2024-09-03T16:02:21.069085-04:00 -custom: - Author: gshank - Issue: "144" diff --git a/.changes/1.9.0/Features-20240911-141416.yaml b/.changes/1.9.0/Features-20240911-141416.yaml deleted file mode 100644 index 990a09d9f..000000000 --- a/.changes/1.9.0/Features-20240911-141416.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: 'Microbatch incremental strategy implementation: merge' -time: 2024-09-11T14:14:16.538536-04:00 -custom: - Author: michelleark - Issue: "149" diff --git a/.changes/1.9.0/Features-20240927-133708.yaml b/.changes/1.9.0/Features-20240927-133708.yaml deleted file mode 100644 index b2dba3337..000000000 --- a/.changes/1.9.0/Features-20240927-133708.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Features -body: Enable setting current value of dbt_valid_to -time: 2024-09-27T13:37:08.808843-04:00 -custom: - Author: gshank - Issue: "151" diff --git a/.changes/1.9.0/Fixes-20240514-193201.yaml b/.changes/1.9.0/Fixes-20240514-193201.yaml deleted file mode 100644 index 95ab24673..000000000 --- a/.changes/1.9.0/Fixes-20240514-193201.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix the semicolon semantics for indexes while respecting other bug fix -time: 2024-05-14T19:32:01.149383-07:00 -custom: - Author: versusfacit - Issue: "85" diff --git a/.changes/1.9.0/Fixes-20240605-202614.yaml b/.changes/1.9.0/Fixes-20240605-202614.yaml deleted file mode 100644 index b7ab8eb06..000000000 --- a/.changes/1.9.0/Fixes-20240605-202614.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Default to psycopg2-binary and allow overriding to psycopg2 via DBT_PSYCOPG2_NAME - (restores previous behavior) -time: 2024-06-05T20:26:14.801254-04:00 -custom: - Author: mikealfare - Issue: "96" diff --git a/.changes/1.9.0/Fixes-20240626-163930.yaml b/.changes/1.9.0/Fixes-20240626-163930.yaml deleted file mode 100644 index 37fcc56f9..000000000 --- a/.changes/1.9.0/Fixes-20240626-163930.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix `persist_docs` for `materialized_view` materializations. Previously, using this configuration with materialized view models would lead to an error. -time: 2024-06-26T16:39:30.455995+02:00 -custom: - Author: morsapaes - Issue: "120" diff --git a/.changes/1.9.0/Under the Hood-20240716-172442.yaml b/.changes/1.9.0/Under the Hood-20240716-172442.yaml deleted file mode 100644 index 8777edbb7..000000000 --- a/.changes/1.9.0/Under the Hood-20240716-172442.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Add support for experimental record/replay testing. -time: 2024-07-16T17:24:42.271859-04:00 -custom: - Author: peterallenwebb - Issue: "123" diff --git a/.changes/1.9.0/Under the Hood-20240731-075011.yaml b/.changes/1.9.0/Under the Hood-20240731-075011.yaml deleted file mode 100644 index 185c91484..000000000 --- a/.changes/1.9.0/Under the Hood-20240731-075011.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Updating changie.yaml to add contributors and PR links -time: 2024-07-31T07:50:11.875044-04:00 -custom: - Author: leahwicz - Issue: "109" diff --git a/CHANGELOG.md b/CHANGELOG.md index 7b1d35595..a4fb6e217 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html), and is generated by [Changie](https://github.com/miniscruff/changie). -## dbt-postgres 1.9.0-rc1 - December 02, 2024 +## dbt-postgres 1.9.0 - December 09, 2024 ### Breaking Changes @@ -13,22 +13,12 @@ and is generated by [Changie](https://github.com/miniscruff/changie). ### Features -- Enable setting current value of dbt_valid_to ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) - -### Contributors -- [@gshank](https://github.com/gshank) ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) -- [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161)) - - -## dbt-postgres 1.9.0-b1 - September 25, 2024 - -### Features - - Add tests for cross-database `cast` macro ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76)) - Cross-database `date` macro ([#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) - Add support for Python 3.12 ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17)) - Allow configuring snapshot column names ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) - Microbatch incremental strategy implementation: merge ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) +- Enable setting current value of dbt_valid_to ([#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) ### Fixes @@ -43,14 +33,15 @@ and is generated by [Changie](https://github.com/miniscruff/changie). ### Contributors - [@dbeatty10](https://github.com/dbeatty10) ([#76](https://github.com/dbt-labs/dbt-postgres/issues/76), [#82](https://github.com/dbt-labs/dbt-postgres/issues/82)) -- [@gshank](https://github.com/gshank) ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144)) +- [@gshank](https://github.com/gshank) ([#144](https://github.com/dbt-labs/dbt-postgres/issues/144), [#151](https://github.com/dbt-labs/dbt-postgres/issues/151)) - [@leahwicz](https://github.com/leahwicz) ([#109](https://github.com/dbt-labs/dbt-postgres/issues/109)) - [@michelleark](https://github.com/michelleark) ([#149](https://github.com/dbt-labs/dbt-postgres/issues/149)) -- [@mikealfare](https://github.com/mikealfare) ([#17](https://github.com/dbt-labs/dbt-postgres/issues/17), [#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) +- [@mikealfare](https://github.com/mikealfare) ([#161](https://github.com/dbt-labs/dbt-postgres/issues/161), [#17](https://github.com/dbt-labs/dbt-postgres/issues/17), [#96](https://github.com/dbt-labs/dbt-postgres/issues/96)) - [@morsapaes](https://github.com/morsapaes) ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) - [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) - [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) + ## Previous Releases For information on prior major and minor releases, see their changelogs: - [1.8](https://github.com/dbt-labs/dbt-postgres/blob/1.8.latest/CHANGELOG.md) From 45e051a37e083781ecbb87e94091097ab4429079 Mon Sep 17 00:00:00 2001 From: GitHub Build Bot <buildbot@fishtownanalytics.com> Date: Mon, 9 Dec 2024 18:18:13 +0000 Subject: [PATCH 111/114] Bump version to 1.9.0 --- dbt/adapters/postgres/__version__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbt/adapters/postgres/__version__.py b/dbt/adapters/postgres/__version__.py index c70591d8a..7aba64097 100644 --- a/dbt/adapters/postgres/__version__.py +++ b/dbt/adapters/postgres/__version__.py @@ -1 +1 @@ -version = "1.9.0rc1" +version = "1.9.0" From 864861b9e08c9b87744d92d4a5f99bfe4da8f77f Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Fri, 13 Dec 2024 18:00:45 -0500 Subject: [PATCH 112/114] Pin `github-release` workflow (#186) --- .changes/1.9.0.md | 1 - .github/workflows/release.yml | 2 +- pyproject.toml | 1 + tests/__init__.py | 0 4 files changed, 2 insertions(+), 2 deletions(-) create mode 100644 tests/__init__.py diff --git a/.changes/1.9.0.md b/.changes/1.9.0.md index ef3364771..62c16b1ff 100644 --- a/.changes/1.9.0.md +++ b/.changes/1.9.0.md @@ -33,4 +33,3 @@ - [@morsapaes](https://github.com/morsapaes) ([#120](https://github.com/dbt-labs/dbt-postgres/issues/120)) - [@peterallenwebb](https://github.com/peterallenwebb) ([#123](https://github.com/dbt-labs/dbt-postgres/issues/123)) - [@versusfacit](https://github.com/versusfacit) ([#85](https://github.com/dbt-labs/dbt-postgres/issues/85)) - diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index bbf363993..e3a43e0b4 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -96,7 +96,7 @@ jobs: needs: - build-release - release-prep - uses: dbt-labs/dbt-adapters/.github/workflows/github-release.yml@main + uses: dbt-labs/dbt-adapters/.github/workflows/github-release.yml@v1.10.3 with: sha: ${{ needs.release-prep.outputs.release-sha }} version_number: ${{ inputs.version }} diff --git a/pyproject.toml b/pyproject.toml index f3aa52c1e..30dcb5056 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,6 +52,7 @@ packages = ["dbt"] path = "dbt/adapters/postgres/__version__.py" [tool.hatch.envs.default] +python = "3.9" dependencies = [ "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb From 05f0337d6b05c9c68617e41c0b5bca9c2a733783 Mon Sep 17 00:00:00 2001 From: Colin Rogers <111200756+colin-rogers-dbt@users.noreply.github.com> Date: Thu, 9 Jan 2025 16:34:06 -0800 Subject: [PATCH 113/114] delete run_operations tests (#188) --- tests/functional/run_operations/fixtures.py | 72 --------- .../run_operations/test_run_operations.py | 143 ------------------ 2 files changed, 215 deletions(-) delete mode 100644 tests/functional/run_operations/fixtures.py delete mode 100644 tests/functional/run_operations/test_run_operations.py diff --git a/tests/functional/run_operations/fixtures.py b/tests/functional/run_operations/fixtures.py deleted file mode 100644 index f6ed82e20..000000000 --- a/tests/functional/run_operations/fixtures.py +++ /dev/null @@ -1,72 +0,0 @@ -happy_macros_sql = """ -{% macro no_args() %} - {% if execute %} - {% call statement(auto_begin=True) %} - create table "{{ schema }}"."no_args" (id int); - commit; - {% endcall %} - {% endif %} -{% endmacro %} - - -{% macro table_name_args(table_name) %} - {% if execute %} - {% call statement(auto_begin=True) %} - create table "{{ schema }}"."{{ table_name }}" (id int); - commit; - {% endcall %} - {% endif %} -{% endmacro %} - -{% macro select_something(name) %} - {% set query %} - select 'hello, {{ name }}' as name - {% endset %} - {% set table = run_query(query) %} - - {% if table.columns['name'][0] != 'hello, world' %} - {% do exceptions.raise_compiler_error("unexpected result: " ~ table) %} - {% endif %} -{% endmacro %} - -{% macro vacuum(table_name) %} - {% set query %} - vacuum "{{ schema }}"."{{ table_name }}" - {% endset %} - {% do run_query(query) %} -{% endmacro %} - - -{% macro vacuum_ref(ref_target) %} - {% set query %} - vacuum {{ ref(ref_target) }} - {% endset %} - {% do run_query(query) %} -{% endmacro %} - - -{% macro log_graph() %} - {% for node in graph.nodes.values() %} - {{ log((node | string), info=True)}} - {% endfor %} -{% endmacro %} - - -{% macro print_something() %} - {{ print("You're doing awesome!") }} -{% endmacro %} -""" - -sad_macros_sql = """ -{% macro syntax_error() %} - {% if execute %} - {% call statement() %} - select NOPE NOT A VALID QUERY - {% endcall %} - {% endif %} -{% endmacro %} -""" - -model_sql = """ -select 1 as id -""" diff --git a/tests/functional/run_operations/test_run_operations.py b/tests/functional/run_operations/test_run_operations.py deleted file mode 100644 index f5d019ff4..000000000 --- a/tests/functional/run_operations/test_run_operations.py +++ /dev/null @@ -1,143 +0,0 @@ -import os - -from dbt.tests.util import ( - check_table_does_exist, - mkdir, - rm_dir, - rm_file, - write_file, -) -from dbt_common.exceptions import DbtInternalError -import pytest -import yaml - -from tests.functional.run_operations.fixtures import ( - happy_macros_sql, - model_sql, - sad_macros_sql, -) -from tests.functional.utils import run_dbt, run_dbt_and_capture - - -class TestOperations: - @pytest.fixture(scope="class") - def models(self): - return {"model.sql": model_sql} - - @pytest.fixture(scope="class") - def macros(self): - return {"happy_macros.sql": happy_macros_sql, "sad_macros.sql": sad_macros_sql} - - @pytest.fixture(scope="class") - def dbt_profile_data(self, unique_schema): - return { - "test": { - "outputs": { - "default": { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), - "user": os.getenv("POSTGRES_TEST_USER", "root"), - "pass": os.getenv("POSTGRES_TEST_PASS", "password"), - "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), - "schema": unique_schema, - }, - "noaccess": { - "type": "postgres", - "threads": 4, - "host": "localhost", - "port": int(os.getenv("POSTGRES_TEST_PORT", 5432)), - "user": "noaccess", - "pass": "password", - "dbname": os.getenv("POSTGRES_TEST_DATABASE", "dbt"), - "schema": unique_schema, - }, - }, - "target": "default", - }, - } - - def run_operation(self, macro, expect_pass=True, extra_args=None, **kwargs): - args = ["run-operation", macro] - if kwargs: - args.extend(("--args", yaml.safe_dump(kwargs))) - if extra_args: - args.extend(extra_args) - return run_dbt(args, expect_pass=expect_pass) - - def test_macro_noargs(self, project): - self.run_operation("no_args") - check_table_does_exist(project.adapter, "no_args") - - def test_macro_args(self, project): - self.run_operation("table_name_args", table_name="my_fancy_table") - check_table_does_exist(project.adapter, "my_fancy_table") - - def test_macro_exception(self, project): - self.run_operation("syntax_error", False) - - def test_macro_missing(self, project): - with pytest.raises( - DbtInternalError, - match="dbt could not find a macro with the name 'this_macro_does_not_exist' in any package", - ): - self.run_operation("this_macro_does_not_exist", False) - - def test_cannot_connect(self, project): - self.run_operation("no_args", extra_args=["--target", "noaccess"], expect_pass=False) - - def test_vacuum(self, project): - run_dbt(["run"]) - # this should succeed - self.run_operation("vacuum", table_name="model") - - def test_vacuum_ref(self, project): - run_dbt(["run"]) - # this should succeed - self.run_operation("vacuum_ref", ref_target="model") - - def test_select(self, project): - self.run_operation("select_something", name="world") - - def test_access_graph(self, project): - self.run_operation("log_graph") - - def test_print(self, project): - # Tests that calling the `print()` macro does not cause an exception - self.run_operation("print_something") - - def test_run_operation_local_macro(self, project): - pkg_macro = """ -{% macro something_cool() %} - {{ log("something cool", info=true) }} -{% endmacro %} - """ - - mkdir("pkg/macros") - - write_file(pkg_macro, "pkg/macros/something_cool.sql") - - pkg_yaml = """ -packages: - - local: pkg - """ - - write_file(pkg_yaml, "packages.yml") - - pkg_dbt_project = """ -name: 'pkg' - """ - - write_file(pkg_dbt_project, "pkg/dbt_project.yml") - - run_dbt(["deps"]) - - results, log_output = run_dbt_and_capture(["run-operation", "something_cool"]) - assert "something cool" in log_output - - results, log_output = run_dbt_and_capture(["run-operation", "pkg.something_cool"]) - assert "something cool" in log_output - - rm_dir("pkg") - rm_file("packages.yml") From 49141a6837a6546ebe26195d651f65565ac307a5 Mon Sep 17 00:00:00 2001 From: Mike Alfare <13974384+mikealfare@users.noreply.github.com> Date: Tue, 10 Dec 2024 19:02:56 -0500 Subject: [PATCH 114/114] ADAP-1123: Create the package subdirectory (#176) --- .github/workflows/changelog-entry-check.yml | 8 +- .github/workflows/code-quality.yml | 4 +- .github/workflows/integration-tests.yml | 17 ++- .github/workflows/release.yml | 7 +- .github/workflows/unit-tests.yml | 1 + .github/workflows/version-bump.yml | 5 + {.changes => dbt-postgres/.changes}/0.0.0.md | 0 .../.changes/1.9.0-b1.md | 0 {.changes => dbt-postgres/.changes}/1.9.0.md | 0 .../.changes}/header.tpl.md | 0 .../.changes/unreleased/.gitkeep | 0 .../Under the Hood-20241201-122244.yaml | 6 + .changie.yaml => dbt-postgres/.changie.yaml | 0 test.env.example => dbt-postgres/.env.example | 0 CHANGELOG.md => dbt-postgres/CHANGELOG.md | 0 .../CONTRIBUTING.md | 0 README.md => dbt-postgres/README.md | 0 {docker => dbt-postgres/docker}/Dockerfile | 0 {docker => dbt-postgres/docker}/README.md | 0 .../docker}/dev.Dockerfile | 0 dbt-postgres/hatch.toml | 62 ++++++++++ dbt-postgres/pyproject.toml | 49 ++++++++ .../scripts/psycopg2-check.sh | 0 .../scripts}/setup_test_database.sql | 0 .../scripts/update_dev_dependency_branches.sh | 0 {dbt => dbt-postgres/src/dbt}/__init__.py | 0 .../src/dbt}/adapters/postgres/__init__.py | 0 .../src/dbt}/adapters/postgres/__version__.py | 0 .../src/dbt}/adapters/postgres/column.py | 0 .../src/dbt}/adapters/postgres/connections.py | 0 .../src/dbt}/adapters/postgres/impl.py | 0 .../dbt}/adapters/postgres/record/__init__.py | 0 .../adapters/postgres/record/cursor/cursor.py | 0 .../adapters/postgres/record/cursor/status.py | 0 .../dbt}/adapters/postgres/record/handle.py | 0 .../src/dbt}/adapters/postgres/relation.py | 0 .../postgres/relation_configs/__init__.py | 0 .../postgres/relation_configs/constants.py | 0 .../postgres/relation_configs/index.py | 0 .../relation_configs/materialized_view.py | 0 .../src/dbt}/include/postgres/__init__.py | 0 .../src/dbt}/include/postgres/dbt_project.yml | 0 .../dbt}/include/postgres/macros/adapters.sql | 0 .../dbt}/include/postgres/macros/catalog.sql | 0 .../incremental_strategies.sql | 0 .../materializations/snapshot_merge.sql | 0 .../include/postgres/macros/relations.sql | 0 .../relations/materialized_view/alter.sql | 0 .../relations/materialized_view/create.sql | 0 .../relations/materialized_view/describe.sql | 0 .../relations/materialized_view/drop.sql | 0 .../relations/materialized_view/refresh.sql | 0 .../relations/materialized_view/rename.sql | 0 .../postgres/macros/relations/table/drop.sql | 0 .../macros/relations/table/rename.sql | 0 .../macros/relations/table/replace.sql | 0 .../postgres/macros/relations/view/drop.sql | 0 .../postgres/macros/relations/view/rename.sql | 0 .../macros/relations/view/replace.sql | 0 .../include/postgres/macros/timestamps.sql | 0 .../postgres/macros/utils/any_value.sql | 0 .../macros/utils/columns_spec_ddl.sql | 0 .../include/postgres/macros/utils/dateadd.sql | 0 .../postgres/macros/utils/datediff.sql | 0 .../postgres/macros/utils/last_day.sql | 0 .../include/postgres/macros/utils/listagg.sql | 0 .../postgres/macros/utils/split_part.sql | 0 .../include/postgres/profile_template.yml | 0 .../dbt}/include/postgres/sample_profiles.yml | 0 {tests => dbt-postgres/tests}/conftest.py | 0 .../tests}/functional/README.md | 0 .../tests}/functional/__init__.py | 0 .../tests}/functional/adapter/__init__.py | 0 .../tests}/functional/adapter/test_aliases.py | 0 .../tests}/functional/adapter/test_basic.py | 0 .../tests}/functional/adapter/test_caching.py | 0 .../tests}/functional/adapter/test_clone.py | 0 .../functional/adapter/test_column_types.py | 0 .../functional/adapter/test_concurrency.py | 0 .../functional/adapter/test_constraints.py | 0 .../functional/adapter/test_data_types.py | 0 .../tests}/functional/adapter/test_debug.py | 0 .../tests}/functional/adapter/test_empty.py | 0 .../functional/adapter/test_ephemeral.py | 0 .../tests}/functional/adapter/test_grants.py | 0 .../adapter/test_hooks/data/seed_model.sql | 0 .../adapter/test_hooks/data/seed_run.sql | 0 .../adapter/test_hooks/test_hooks.py | 0 .../functional/adapter/test_incremental.py | 0 .../adapter/test_incremental_microbatch.py | 0 .../functional/adapter/test_persist_docs.py | 0 .../functional/adapter/test_query_comment.py | 0 .../functional/adapter/test_relations.py | 0 .../tests}/functional/adapter/test_show.py | 0 .../functional/adapter/test_simple_copy.py | 0 .../adapter/test_simple_seed/seed_bom.csv | 0 .../test_simple_seed/test_simple_seed.py | 0 .../adapter/test_simple_snapshot.py | 0 .../adapter/test_store_test_failures.py | 0 .../functional/adapter/test_unit_testing.py | 0 .../tests}/functional/adapter/test_utils.py | 0 .../functional/basic/data/seed-initial.csv | 0 .../functional/basic/data/seed-update.csv | 0 .../basic/data/summary_expected.csv | 0 .../basic/data/summary_expected_update.csv | 0 .../functional/basic/data/varchar10_seed.sql | 0 .../functional/basic/data/varchar300_seed.sql | 0 .../tests}/functional/basic/test_basic.py | 0 .../basic/test_invalid_reference.py | 0 .../functional/basic/test_jaffle_shop.py | 0 .../functional/basic/test_mixed_case_db.py | 0 .../tests}/functional/basic/test_project.py | 0 .../functional/basic/test_simple_reference.py | 0 .../functional/basic/test_varchar_widening.py | 0 .../tests}/functional/compile/fixtures.py | 0 .../tests}/functional/compile/test_compile.py | 0 .../tests}/functional/conftest.py | 0 .../contracts/test_contract_enforcement.py | 0 .../contracts/test_contract_precision.py | 0 .../contracts/test_nonstandard_data_type.py | 0 .../functional/custom_aliases/fixtures.py | 0 .../custom_aliases/test_custom_aliases.py | 0 .../data/seed_expected.sql | 0 .../test_custom_singular_tests.py | 0 .../functional/dbt_debug/test_dbt_debug.py | 0 .../tests}/functional/dbt_runner.py | 0 .../tests}/functional/exit_codes/fixtures.py | 0 .../functional/exit_codes/test_exit_codes.py | 0 .../tests}/functional/exposures/fixtures.py | 0 .../exposures/test_exposure_configs.py | 0 .../functional/exposures/test_exposures.py | 0 .../graph_selection/test_graph_selection.py | 0 .../graph_selection/test_group_selection.py | 0 .../test_intersection_syntax.py | 0 .../test_schema_test_graph_selection.py | 0 .../graph_selection/test_tag_selection.py | 0 .../graph_selection/test_version_selection.py | 0 .../incremental_schema_tests/fixtures.py | 0 .../test_incremental_schema.py | 0 .../test_invalid_models.py | 0 .../invalid_model_tests/test_model_warning.py | 0 .../tests}/functional/macros/data/seed.sql | 0 .../tests}/functional/macros/fixtures.py | 0 .../package_macro_overrides/dbt_project.yml | 0 .../package_macro_overrides/macros/macros.sql | 0 .../tests}/functional/macros/test_macros.py | 0 .../functional/materializations/conftest.py | 0 .../functional/materializations/fixtures.py | 0 .../test_materialized_view.py | 0 .../test_postgres_materialized_view.py | 0 .../materialized_view_tests/utils.py | 0 .../materializations/test_incremental.py | 0 .../test_runtime_materialization.py | 0 .../test_supported_languages.py | 0 .../tests}/functional/postgres/fixtures.py | 0 .../functional/postgres/test_indexes.py | 0 .../tests}/functional/projects/__init__.py | 0 .../projects/dbt_integration/__init__.py | 0 .../dbt_integration/macros/do_something.sql | 0 .../dbt_integration/models/incremental.sql | 0 .../projects/dbt_integration/models/table.sql | 0 .../projects/dbt_integration/models/view.sql | 0 .../dbt_integration/schemas/project.yml | 0 .../dbt_integration/schemas/schema.yml | 0 .../projects/graph_selection/__init__.py | 0 .../projects/graph_selection/data/seed.csv | 0 .../graph_selection/data/summary_expected.csv | 0 .../models/alternative_users.sql | 0 .../graph_selection/models/base_users.sql | 0 .../graph_selection/models/emails.sql | 0 .../graph_selection/models/emails_alt.sql | 0 .../graph_selection/models/nested_users.sql | 0 .../graph_selection/models/never_selected.sql | 0 .../graph_selection/models/subdir.sql | 0 .../projects/graph_selection/models/users.sql | 0 .../graph_selection/models/users_rollup.sql | 0 .../models/users_rollup_dependency.sql | 0 .../schemas/patch_path_selection.yml | 0 .../graph_selection/schemas/properties.yml | 0 .../graph_selection/schemas/schema.yml | 0 .../projects/jaffle_shop/__init__.py | 0 .../jaffle_shop/data/raw_customers.csv | 0 .../projects/jaffle_shop/data/raw_orders.csv | 0 .../jaffle_shop/data/raw_payments.csv | 0 .../projects/jaffle_shop/docs/docs.md | 0 .../projects/jaffle_shop/docs/overview.md | 0 .../projects/jaffle_shop/models/customers.sql | 0 .../projects/jaffle_shop/models/orders.sql | 0 .../jaffle_shop/schemas/jaffle_shop.yml | 0 .../projects/jaffle_shop/schemas/staging.yml | 0 .../jaffle_shop/staging/stg_customers.sql | 0 .../jaffle_shop/staging/stg_orders.sql | 0 .../jaffle_shop/staging/stg_payments.sql | 0 .../tests}/functional/projects/utils.py | 0 .../tests}/functional/retry/fixtures.py | 0 .../tests}/functional/retry/test_retry.py | 0 .../functional/schema/fixtures/macros.py | 0 .../tests}/functional/schema/fixtures/sql.py | 0 .../functional/schema/test_custom_schema.py | 0 .../functional/selected_resources/fixtures.py | 0 .../test_selected_resources.py | 0 .../functional/semantic_models/fixtures.py | 0 .../test_semantic_model_configs.py | 0 .../test_semantic_model_parsing.py | 0 .../semantic_models/test_semantic_models.py | 0 .../tests}/functional/show/fixtures.py | 0 .../tests}/functional/show/test_show.py | 0 .../functional/sources/common_source_setup.py | 0 .../tests}/functional/sources/data/seed.sql | 0 .../tests}/functional/sources/fixtures.py | 0 .../functional/sources/test_simple_source.py | 0 .../functional/sources/test_source_configs.py | 0 .../sources/test_source_fresher_state.py | 0 .../sources/test_source_freshness.py | 0 .../tests}/functional/statements/fixtures.py | 0 .../functional/statements/test_statements.py | 0 .../tests}/functional/test_access.py | 0 .../tests}/functional/test_analyses.py | 0 .../tests}/functional/test_catalog.py | 0 .../tests}/functional/test_clean.py | 0 .../tests}/functional/test_colors.py | 0 .../tests}/functional/test_column_quotes.py | 0 .../tests}/functional/test_config.py | 0 .../functional/test_connection_manager.py | 0 .../functional/test_custom_target_path.py | 0 .../tests}/functional/test_cycles.py | 0 .../functional/test_default_selectors.py | 0 .../tests}/functional/test_events.py | 0 .../functional/test_external_reference.py | 0 .../tests}/functional/test_fail_fast.py | 0 .../functional/test_multiple_indexes.py | 0 .../tests}/functional/test_ref_override.py | 0 .../tests}/functional/test_relation_name.py | 0 .../tests}/functional/test_severity.py | 0 .../functional/test_store_test_failures.py | 0 .../tests}/functional/test_thread_count.py | 0 .../tests}/functional/test_timezones.py | 0 .../tests}/functional/test_types.py | 0 .../tests}/functional/test_unlogged_table.py | 0 .../functional/unit_testing/fixtures.py | 0 .../unit_testing/test_csv_fixtures.py | 0 .../functional/unit_testing/test_state.py | 0 .../unit_testing/test_unit_testing.py | 0 .../unit_testing/test_ut_dependency.py | 0 .../unit_testing/test_ut_sources.py | 0 .../tests}/functional/utils.py | 0 .../tests}/unit/test_adapter.py | 0 .../tests}/unit/test_adapter_conversions.py | 0 .../tests}/unit/test_connection.py | 0 .../tests}/unit/test_filter_catalog.py | 0 .../tests}/unit/test_materialized_view.py | 0 .../tests}/unit/test_renamed_relations.py | 0 {tests => dbt-postgres/tests}/unit/utils.py | 0 pyproject.toml | 112 ------------------ 254 files changed, 146 insertions(+), 125 deletions(-) rename {.changes => dbt-postgres/.changes}/0.0.0.md (100%) rename .changes/unreleased/.gitkeep => dbt-postgres/.changes/1.9.0-b1.md (100%) rename {.changes => dbt-postgres/.changes}/1.9.0.md (100%) rename {.changes => dbt-postgres/.changes}/header.tpl.md (100%) rename dbt/include/postgres/macros/utils/columns_spec_ddl.sql => dbt-postgres/.changes/unreleased/.gitkeep (100%) create mode 100644 dbt-postgres/.changes/unreleased/Under the Hood-20241201-122244.yaml rename .changie.yaml => dbt-postgres/.changie.yaml (100%) rename test.env.example => dbt-postgres/.env.example (100%) rename CHANGELOG.md => dbt-postgres/CHANGELOG.md (100%) rename CONTRIBUTING.md => dbt-postgres/CONTRIBUTING.md (100%) rename README.md => dbt-postgres/README.md (100%) rename {docker => dbt-postgres/docker}/Dockerfile (100%) rename {docker => dbt-postgres/docker}/README.md (100%) rename {docker => dbt-postgres/docker}/dev.Dockerfile (100%) create mode 100644 dbt-postgres/hatch.toml create mode 100644 dbt-postgres/pyproject.toml rename {.github => dbt-postgres}/scripts/psycopg2-check.sh (100%) rename {scripts => dbt-postgres/scripts}/setup_test_database.sql (100%) rename {.github => dbt-postgres}/scripts/update_dev_dependency_branches.sh (100%) rename {dbt => dbt-postgres/src/dbt}/__init__.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/__init__.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/__version__.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/column.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/connections.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/impl.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/record/__init__.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/record/cursor/cursor.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/record/cursor/status.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/record/handle.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/relation.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/relation_configs/__init__.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/relation_configs/constants.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/relation_configs/index.py (100%) rename {dbt => dbt-postgres/src/dbt}/adapters/postgres/relation_configs/materialized_view.py (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/__init__.py (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/dbt_project.yml (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/adapters.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/catalog.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/materializations/incremental_strategies.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/materializations/snapshot_merge.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/materialized_view/alter.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/materialized_view/create.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/materialized_view/describe.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/materialized_view/drop.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/materialized_view/refresh.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/materialized_view/rename.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/table/drop.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/table/rename.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/table/replace.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/view/drop.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/view/rename.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/relations/view/replace.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/timestamps.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/utils/any_value.sql (100%) rename tests/__init__.py => dbt-postgres/src/dbt/include/postgres/macros/utils/columns_spec_ddl.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/utils/dateadd.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/utils/datediff.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/utils/last_day.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/utils/listagg.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/macros/utils/split_part.sql (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/profile_template.yml (100%) rename {dbt => dbt-postgres/src/dbt}/include/postgres/sample_profiles.yml (100%) rename {tests => dbt-postgres/tests}/conftest.py (100%) rename {tests => dbt-postgres/tests}/functional/README.md (100%) rename {tests => dbt-postgres/tests}/functional/__init__.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/__init__.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_aliases.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_basic.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_caching.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_clone.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_column_types.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_concurrency.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_constraints.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_data_types.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_debug.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_empty.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_ephemeral.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_grants.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_hooks/data/seed_model.sql (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_hooks/data/seed_run.sql (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_hooks/test_hooks.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_incremental.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_incremental_microbatch.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_persist_docs.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_query_comment.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_relations.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_show.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_simple_copy.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_simple_seed/seed_bom.csv (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_simple_seed/test_simple_seed.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_simple_snapshot.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_store_test_failures.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_unit_testing.py (100%) rename {tests => dbt-postgres/tests}/functional/adapter/test_utils.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/data/seed-initial.csv (100%) rename {tests => dbt-postgres/tests}/functional/basic/data/seed-update.csv (100%) rename {tests => dbt-postgres/tests}/functional/basic/data/summary_expected.csv (100%) rename {tests => dbt-postgres/tests}/functional/basic/data/summary_expected_update.csv (100%) rename {tests => dbt-postgres/tests}/functional/basic/data/varchar10_seed.sql (100%) rename {tests => dbt-postgres/tests}/functional/basic/data/varchar300_seed.sql (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_basic.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_invalid_reference.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_jaffle_shop.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_mixed_case_db.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_project.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_simple_reference.py (100%) rename {tests => dbt-postgres/tests}/functional/basic/test_varchar_widening.py (100%) rename {tests => dbt-postgres/tests}/functional/compile/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/compile/test_compile.py (100%) rename {tests => dbt-postgres/tests}/functional/conftest.py (100%) rename {tests => dbt-postgres/tests}/functional/contracts/test_contract_enforcement.py (100%) rename {tests => dbt-postgres/tests}/functional/contracts/test_contract_precision.py (100%) rename {tests => dbt-postgres/tests}/functional/contracts/test_nonstandard_data_type.py (100%) rename {tests => dbt-postgres/tests}/functional/custom_aliases/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/custom_aliases/test_custom_aliases.py (100%) rename {tests => dbt-postgres/tests}/functional/custom_singular_tests/data/seed_expected.sql (100%) rename {tests => dbt-postgres/tests}/functional/custom_singular_tests/test_custom_singular_tests.py (100%) rename {tests => dbt-postgres/tests}/functional/dbt_debug/test_dbt_debug.py (100%) rename {tests => dbt-postgres/tests}/functional/dbt_runner.py (100%) rename {tests => dbt-postgres/tests}/functional/exit_codes/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/exit_codes/test_exit_codes.py (100%) rename {tests => dbt-postgres/tests}/functional/exposures/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/exposures/test_exposure_configs.py (100%) rename {tests => dbt-postgres/tests}/functional/exposures/test_exposures.py (100%) rename {tests => dbt-postgres/tests}/functional/graph_selection/test_graph_selection.py (100%) rename {tests => dbt-postgres/tests}/functional/graph_selection/test_group_selection.py (100%) rename {tests => dbt-postgres/tests}/functional/graph_selection/test_intersection_syntax.py (100%) rename {tests => dbt-postgres/tests}/functional/graph_selection/test_schema_test_graph_selection.py (100%) rename {tests => dbt-postgres/tests}/functional/graph_selection/test_tag_selection.py (100%) rename {tests => dbt-postgres/tests}/functional/graph_selection/test_version_selection.py (100%) rename {tests => dbt-postgres/tests}/functional/incremental_schema_tests/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/incremental_schema_tests/test_incremental_schema.py (100%) rename {tests => dbt-postgres/tests}/functional/invalid_model_tests/test_invalid_models.py (100%) rename {tests => dbt-postgres/tests}/functional/invalid_model_tests/test_model_warning.py (100%) rename {tests => dbt-postgres/tests}/functional/macros/data/seed.sql (100%) rename {tests => dbt-postgres/tests}/functional/macros/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/macros/package_macro_overrides/dbt_project.yml (100%) rename {tests => dbt-postgres/tests}/functional/macros/package_macro_overrides/macros/macros.sql (100%) rename {tests => dbt-postgres/tests}/functional/macros/test_macros.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/conftest.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/materialized_view_tests/test_materialized_view.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/materialized_view_tests/utils.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/test_incremental.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/test_runtime_materialization.py (100%) rename {tests => dbt-postgres/tests}/functional/materializations/test_supported_languages.py (100%) rename {tests => dbt-postgres/tests}/functional/postgres/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/postgres/test_indexes.py (100%) rename {tests => dbt-postgres/tests}/functional/projects/__init__.py (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/__init__.py (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/macros/do_something.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/models/incremental.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/models/table.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/models/view.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/schemas/project.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/dbt_integration/schemas/schema.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/__init__.py (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/data/seed.csv (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/data/summary_expected.csv (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/alternative_users.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/base_users.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/emails.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/emails_alt.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/nested_users.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/never_selected.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/subdir.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/users.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/users_rollup.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/models/users_rollup_dependency.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/schemas/patch_path_selection.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/schemas/properties.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/graph_selection/schemas/schema.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/__init__.py (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/data/raw_customers.csv (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/data/raw_orders.csv (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/data/raw_payments.csv (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/docs/docs.md (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/docs/overview.md (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/models/customers.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/models/orders.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/schemas/jaffle_shop.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/schemas/staging.yml (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/staging/stg_customers.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/staging/stg_orders.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/jaffle_shop/staging/stg_payments.sql (100%) rename {tests => dbt-postgres/tests}/functional/projects/utils.py (100%) rename {tests => dbt-postgres/tests}/functional/retry/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/retry/test_retry.py (100%) rename {tests => dbt-postgres/tests}/functional/schema/fixtures/macros.py (100%) rename {tests => dbt-postgres/tests}/functional/schema/fixtures/sql.py (100%) rename {tests => dbt-postgres/tests}/functional/schema/test_custom_schema.py (100%) rename {tests => dbt-postgres/tests}/functional/selected_resources/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/selected_resources/test_selected_resources.py (100%) rename {tests => dbt-postgres/tests}/functional/semantic_models/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/semantic_models/test_semantic_model_configs.py (100%) rename {tests => dbt-postgres/tests}/functional/semantic_models/test_semantic_model_parsing.py (100%) rename {tests => dbt-postgres/tests}/functional/semantic_models/test_semantic_models.py (100%) rename {tests => dbt-postgres/tests}/functional/show/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/show/test_show.py (100%) rename {tests => dbt-postgres/tests}/functional/sources/common_source_setup.py (100%) rename {tests => dbt-postgres/tests}/functional/sources/data/seed.sql (100%) rename {tests => dbt-postgres/tests}/functional/sources/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/sources/test_simple_source.py (100%) rename {tests => dbt-postgres/tests}/functional/sources/test_source_configs.py (100%) rename {tests => dbt-postgres/tests}/functional/sources/test_source_fresher_state.py (100%) rename {tests => dbt-postgres/tests}/functional/sources/test_source_freshness.py (100%) rename {tests => dbt-postgres/tests}/functional/statements/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/statements/test_statements.py (100%) rename {tests => dbt-postgres/tests}/functional/test_access.py (100%) rename {tests => dbt-postgres/tests}/functional/test_analyses.py (100%) rename {tests => dbt-postgres/tests}/functional/test_catalog.py (100%) rename {tests => dbt-postgres/tests}/functional/test_clean.py (100%) rename {tests => dbt-postgres/tests}/functional/test_colors.py (100%) rename {tests => dbt-postgres/tests}/functional/test_column_quotes.py (100%) rename {tests => dbt-postgres/tests}/functional/test_config.py (100%) rename {tests => dbt-postgres/tests}/functional/test_connection_manager.py (100%) rename {tests => dbt-postgres/tests}/functional/test_custom_target_path.py (100%) rename {tests => dbt-postgres/tests}/functional/test_cycles.py (100%) rename {tests => dbt-postgres/tests}/functional/test_default_selectors.py (100%) rename {tests => dbt-postgres/tests}/functional/test_events.py (100%) rename {tests => dbt-postgres/tests}/functional/test_external_reference.py (100%) rename {tests => dbt-postgres/tests}/functional/test_fail_fast.py (100%) rename {tests => dbt-postgres/tests}/functional/test_multiple_indexes.py (100%) rename {tests => dbt-postgres/tests}/functional/test_ref_override.py (100%) rename {tests => dbt-postgres/tests}/functional/test_relation_name.py (100%) rename {tests => dbt-postgres/tests}/functional/test_severity.py (100%) rename {tests => dbt-postgres/tests}/functional/test_store_test_failures.py (100%) rename {tests => dbt-postgres/tests}/functional/test_thread_count.py (100%) rename {tests => dbt-postgres/tests}/functional/test_timezones.py (100%) rename {tests => dbt-postgres/tests}/functional/test_types.py (100%) rename {tests => dbt-postgres/tests}/functional/test_unlogged_table.py (100%) rename {tests => dbt-postgres/tests}/functional/unit_testing/fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/unit_testing/test_csv_fixtures.py (100%) rename {tests => dbt-postgres/tests}/functional/unit_testing/test_state.py (100%) rename {tests => dbt-postgres/tests}/functional/unit_testing/test_unit_testing.py (100%) rename {tests => dbt-postgres/tests}/functional/unit_testing/test_ut_dependency.py (100%) rename {tests => dbt-postgres/tests}/functional/unit_testing/test_ut_sources.py (100%) rename {tests => dbt-postgres/tests}/functional/utils.py (100%) rename {tests => dbt-postgres/tests}/unit/test_adapter.py (100%) rename {tests => dbt-postgres/tests}/unit/test_adapter_conversions.py (100%) rename {tests => dbt-postgres/tests}/unit/test_connection.py (100%) rename {tests => dbt-postgres/tests}/unit/test_filter_catalog.py (100%) rename {tests => dbt-postgres/tests}/unit/test_materialized_view.py (100%) rename {tests => dbt-postgres/tests}/unit/test_renamed_relations.py (100%) rename {tests => dbt-postgres/tests}/unit/utils.py (100%) delete mode 100644 pyproject.toml diff --git a/.github/workflows/changelog-entry-check.yml b/.github/workflows/changelog-entry-check.yml index 889c09952..34b2a17b0 100644 --- a/.github/workflows/changelog-entry-check.yml +++ b/.github/workflows/changelog-entry-check.yml @@ -19,11 +19,7 @@ permissions: jobs: changelog-entry-check: - uses: dbt-labs/actions/.github/workflows/changelog-existence.yml@main + uses: dbt-labs/dbt-adapters/.github/workflows/_changelog-entry-check.yml@main with: - changelog_comment: >- - Thank you for your pull request! We could not find a changelog entry for this change. - For details on how to document a change, see the - [dbt-postgres contributing guide](https://github.com/dbt-labs/dbt-postgres/blob/main/CONTRIBUTING.md). - skip_label: "Skip Changelog" + pull-request: ${{ github.event.pull_request.number }} secrets: inherit diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 1ca2ded01..3619e0dda 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -40,7 +40,8 @@ jobs: - name: Update Adapters and Core branches if: ${{ contains(github.event_name, 'workflow_') }} shell: bash - run: ./.github/scripts/update_dev_packages.sh ${{ inputs.dbt_adapters_branch }} "main" + run: scripts/update_dev_packages.sh ${{ inputs.dbt_adapters_branch }} "main" + working-directory: ./dbt-postgres - name: Setup `hatch` uses: dbt-labs/dbt-adapters/.github/actions/setup-hatch@main @@ -48,3 +49,4 @@ jobs: - name: Run code quality shell: bash run: hatch run code-quality + working-directory: ./dbt-postgres diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 304c139c0..fd7f7ca10 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -111,14 +111,16 @@ jobs: - name: Update Adapters and Core branches if: ${{ github.event_name == 'workflow_call' || github.event_name == 'workflow_dispatch'}} run: | - ./.github/scripts/update_dev_dependency_branches.sh \ + scripts/update_dev_dependency_branches.sh \ ${{ inputs.dbt_adapters_branch }} \ ${{ inputs.dbt_core_branch }} \ ${{ inputs.dbt_common_branch }} cat pyproject.toml + working-directory: ./dbt-postgres - name: Setup postgres run: psql -f ./scripts/setup_test_database.sql + working-directory: ./dbt-postgres env: PGHOST: localhost PGPORT: 5432 @@ -133,6 +135,7 @@ jobs: - name: Run integration tests run: hatch run integration-tests + working-directory: ./dbt-postgres env: POSTGRES_TEST_HOST: localhost POSTGRES_TEST_PORT: 5432 @@ -164,25 +167,29 @@ jobs: brew install postgresql - name: "Test psycopg2 name - default" - run: .github/scripts/psycopg2-check.sh + run: scripts/psycopg2-check.sh + working-directory: ./dbt-postgres env: PSYCOPG2_EXPECTED_NAME: psycopg2-binary - name: "Test psycopg2 name - invalid override" - run: .github/scripts/psycopg2-check.sh + run: scripts/psycopg2-check.sh + working-directory: ./dbt-postgres env: DBT_PSYCOPG2_NAME: rubber-baby-buggy-bumpers PSYCOPG2_EXPECTED_NAME: psycopg2-binary - name: "Test psycopg2 name - override" - run: .github/scripts/psycopg2-check.sh + run: scripts/psycopg2-check.sh + working-directory: ./dbt-postgres env: DBT_PSYCOPG2_NAME: psycopg2 PSYCOPG2_EXPECTED_NAME: psycopg2-binary # we have not implemented the hook yet, so this doesn't work - name: "Test psycopg2 name - manual override" # verify that the workaround documented in the `README.md` continues to work - run: .github/scripts/psycopg2-check.sh + run: scripts/psycopg2-check.sh + working-directory: ./dbt-postgres env: PSYCOPG2_WORKAROUND: true PSYCOPG2_EXPECTED_NAME: psycopg2 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e3a43e0b4..b10c03aa0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -32,10 +32,15 @@ concurrency: group: "${{ github.workflow }}-${{ github.event_name }}-${{ inputs.version }}-${{ inputs.deploy-to }}" cancel-in-progress: true +defaults: + run: + shell: bash + working-directory: ./dbt-postgres + jobs: release-prep: name: "Release prep: generate changelog, bump version" - uses: dbt-labs/dbt-postgres/.github/workflows/release_prep_hatch.yml@main + uses: ./.github/workflows/release_prep_hatch.yml with: branch: ${{ inputs.branch }} version: ${{ inputs.version }} diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index 5805c9821..be1a5195e 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -39,3 +39,4 @@ jobs: - name: Run unit tests run: hatch run unit-tests shell: bash + working-directory: ./dbt-postgres diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml index bde34d683..70e01cfb8 100644 --- a/.github/workflows/version-bump.yml +++ b/.github/workflows/version-bump.yml @@ -20,6 +20,11 @@ on: description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)' required: true +defaults: + run: + shell: bash + working-directory: ./dbt-postgres + jobs: version_bump_and_changie: uses: dbt-labs/actions/.github/workflows/version-bump.yml@main diff --git a/.changes/0.0.0.md b/dbt-postgres/.changes/0.0.0.md similarity index 100% rename from .changes/0.0.0.md rename to dbt-postgres/.changes/0.0.0.md diff --git a/.changes/unreleased/.gitkeep b/dbt-postgres/.changes/1.9.0-b1.md similarity index 100% rename from .changes/unreleased/.gitkeep rename to dbt-postgres/.changes/1.9.0-b1.md diff --git a/.changes/1.9.0.md b/dbt-postgres/.changes/1.9.0.md similarity index 100% rename from .changes/1.9.0.md rename to dbt-postgres/.changes/1.9.0.md diff --git a/.changes/header.tpl.md b/dbt-postgres/.changes/header.tpl.md similarity index 100% rename from .changes/header.tpl.md rename to dbt-postgres/.changes/header.tpl.md diff --git a/dbt/include/postgres/macros/utils/columns_spec_ddl.sql b/dbt-postgres/.changes/unreleased/.gitkeep similarity index 100% rename from dbt/include/postgres/macros/utils/columns_spec_ddl.sql rename to dbt-postgres/.changes/unreleased/.gitkeep diff --git a/dbt-postgres/.changes/unreleased/Under the Hood-20241201-122244.yaml b/dbt-postgres/.changes/unreleased/Under the Hood-20241201-122244.yaml new file mode 100644 index 000000000..29cd08e4a --- /dev/null +++ b/dbt-postgres/.changes/unreleased/Under the Hood-20241201-122244.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Migrate to a monorepo in dbt-adapters +time: 2024-12-01T12:22:44.407931-05:00 +custom: + Author: mikealfare + Issue: "176" diff --git a/.changie.yaml b/dbt-postgres/.changie.yaml similarity index 100% rename from .changie.yaml rename to dbt-postgres/.changie.yaml diff --git a/test.env.example b/dbt-postgres/.env.example similarity index 100% rename from test.env.example rename to dbt-postgres/.env.example diff --git a/CHANGELOG.md b/dbt-postgres/CHANGELOG.md similarity index 100% rename from CHANGELOG.md rename to dbt-postgres/CHANGELOG.md diff --git a/CONTRIBUTING.md b/dbt-postgres/CONTRIBUTING.md similarity index 100% rename from CONTRIBUTING.md rename to dbt-postgres/CONTRIBUTING.md diff --git a/README.md b/dbt-postgres/README.md similarity index 100% rename from README.md rename to dbt-postgres/README.md diff --git a/docker/Dockerfile b/dbt-postgres/docker/Dockerfile similarity index 100% rename from docker/Dockerfile rename to dbt-postgres/docker/Dockerfile diff --git a/docker/README.md b/dbt-postgres/docker/README.md similarity index 100% rename from docker/README.md rename to dbt-postgres/docker/README.md diff --git a/docker/dev.Dockerfile b/dbt-postgres/docker/dev.Dockerfile similarity index 100% rename from docker/dev.Dockerfile rename to dbt-postgres/docker/dev.Dockerfile diff --git a/dbt-postgres/hatch.toml b/dbt-postgres/hatch.toml new file mode 100644 index 000000000..60bbb5562 --- /dev/null +++ b/dbt-postgres/hatch.toml @@ -0,0 +1,62 @@ +[version] +path = "src/dbt/adapters/postgres/__version__.py" + +[build.targets.sdist] +packages = ["src/dbt/adapters", "src/dbt/include"] +sources = ["src"] + +[build.targets.wheel] +packages = ["src/dbt/adapters", "src/dbt/include"] +sources = ["src"] + +[envs.default] +dependencies = [ + "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", + "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", + "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", + "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", + "pre-commit==3.7.0", + "freezegun", + "pytest", + "pytest-dotenv", + "pytest-mock", + "pytest-xdist", +] +[envs.default.env-vars] +DBT_TEST_USER_1 = "dbt_test_user_1" +DBT_TEST_USER_2 = "dbt_test_user_2" +DBT_TEST_USER_3 = "dbt_test_user_3" +[envs.default.scripts] +setup = "pre-commit install" +code-quality = "pre-commit run --all-files" +unit-tests = "python -m pytest {args:tests/unit}" +integration-tests = "python -m pytest {args:tests/functional}" +docker-dev = [ + "echo Does not support integration testing, only development and unit testing. See issue https://github.com/dbt-labs/dbt-postgres/issues/99", + "docker build -f docker/dev.Dockerfile -t dbt-postgres-dev .", + "docker run --rm -it --name dbt-postgres-dev -v $(pwd):/opt/code dbt-postgres-dev", +] +docker-prod = "docker build -f docker/Dockerfile -t dbt-postgres ." + +[envs.build] +detached = true +dependencies = [ + "wheel", + "twine", + "check-wheel-contents", +] +[envs.build.scripts] +check-all = [ + "- check-wheel", + "- check-sdist", +] +check-wheel = [ + "twine check dist/*", + "find ./dist/dbt_postgres-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", + "pip freeze | grep dbt-postgres", +] +check-sdist = [ + "check-wheel-contents dist/*.whl --ignore W007,W008", + "find ./dist/dbt_postgres-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", + "pip freeze | grep dbt-postgres", +] diff --git a/dbt-postgres/pyproject.toml b/dbt-postgres/pyproject.toml new file mode 100644 index 000000000..6f4b4604a --- /dev/null +++ b/dbt-postgres/pyproject.toml @@ -0,0 +1,49 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +dynamic = ["version"] +name = "dbt-postgres" +description = "The set of adapter protocols and base functionality that supports integration with dbt-core" +readme = "README.md" +keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "postgres"] +requires-python = ">=3.9.0" +authors = [ + { name = "dbt Labs", email = "info@dbtlabs.com" }, +] +maintainers = [ + { name = "dbt Labs", email = "info@dbtlabs.com" }, +] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "License :: OSI Approved :: Apache Software License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +dependencies = [ + "psycopg2-binary>=2.9,<3.0", + "dbt-adapters>=1.7.0,<2.0", + # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency + "dbt-core>=1.8.0", + # installed via dbt-adapters but used directly + "dbt-common>=1.0.4,<2.0", + "agate>=1.0,<2.0", +] +[project.urls] +Homepage = "https://github.com/dbt-labs/dbt-postgres" +Documentation = "https://docs.getdbt.com" +Repository = "https://github.com/dbt-labs/dbt-postgres.git" +Issues = "https://github.com/dbt-labs/dbt-postgres/issues" +Changelog = "https://github.com/dbt-labs/dbt-postgres/blob/main/CHANGELOG.md" + +[tool.pytest] +testpaths = [ + "tests/functional", + "tests/unit", +] diff --git a/.github/scripts/psycopg2-check.sh b/dbt-postgres/scripts/psycopg2-check.sh similarity index 100% rename from .github/scripts/psycopg2-check.sh rename to dbt-postgres/scripts/psycopg2-check.sh diff --git a/scripts/setup_test_database.sql b/dbt-postgres/scripts/setup_test_database.sql similarity index 100% rename from scripts/setup_test_database.sql rename to dbt-postgres/scripts/setup_test_database.sql diff --git a/.github/scripts/update_dev_dependency_branches.sh b/dbt-postgres/scripts/update_dev_dependency_branches.sh similarity index 100% rename from .github/scripts/update_dev_dependency_branches.sh rename to dbt-postgres/scripts/update_dev_dependency_branches.sh diff --git a/dbt/__init__.py b/dbt-postgres/src/dbt/__init__.py similarity index 100% rename from dbt/__init__.py rename to dbt-postgres/src/dbt/__init__.py diff --git a/dbt/adapters/postgres/__init__.py b/dbt-postgres/src/dbt/adapters/postgres/__init__.py similarity index 100% rename from dbt/adapters/postgres/__init__.py rename to dbt-postgres/src/dbt/adapters/postgres/__init__.py diff --git a/dbt/adapters/postgres/__version__.py b/dbt-postgres/src/dbt/adapters/postgres/__version__.py similarity index 100% rename from dbt/adapters/postgres/__version__.py rename to dbt-postgres/src/dbt/adapters/postgres/__version__.py diff --git a/dbt/adapters/postgres/column.py b/dbt-postgres/src/dbt/adapters/postgres/column.py similarity index 100% rename from dbt/adapters/postgres/column.py rename to dbt-postgres/src/dbt/adapters/postgres/column.py diff --git a/dbt/adapters/postgres/connections.py b/dbt-postgres/src/dbt/adapters/postgres/connections.py similarity index 100% rename from dbt/adapters/postgres/connections.py rename to dbt-postgres/src/dbt/adapters/postgres/connections.py diff --git a/dbt/adapters/postgres/impl.py b/dbt-postgres/src/dbt/adapters/postgres/impl.py similarity index 100% rename from dbt/adapters/postgres/impl.py rename to dbt-postgres/src/dbt/adapters/postgres/impl.py diff --git a/dbt/adapters/postgres/record/__init__.py b/dbt-postgres/src/dbt/adapters/postgres/record/__init__.py similarity index 100% rename from dbt/adapters/postgres/record/__init__.py rename to dbt-postgres/src/dbt/adapters/postgres/record/__init__.py diff --git a/dbt/adapters/postgres/record/cursor/cursor.py b/dbt-postgres/src/dbt/adapters/postgres/record/cursor/cursor.py similarity index 100% rename from dbt/adapters/postgres/record/cursor/cursor.py rename to dbt-postgres/src/dbt/adapters/postgres/record/cursor/cursor.py diff --git a/dbt/adapters/postgres/record/cursor/status.py b/dbt-postgres/src/dbt/adapters/postgres/record/cursor/status.py similarity index 100% rename from dbt/adapters/postgres/record/cursor/status.py rename to dbt-postgres/src/dbt/adapters/postgres/record/cursor/status.py diff --git a/dbt/adapters/postgres/record/handle.py b/dbt-postgres/src/dbt/adapters/postgres/record/handle.py similarity index 100% rename from dbt/adapters/postgres/record/handle.py rename to dbt-postgres/src/dbt/adapters/postgres/record/handle.py diff --git a/dbt/adapters/postgres/relation.py b/dbt-postgres/src/dbt/adapters/postgres/relation.py similarity index 100% rename from dbt/adapters/postgres/relation.py rename to dbt-postgres/src/dbt/adapters/postgres/relation.py diff --git a/dbt/adapters/postgres/relation_configs/__init__.py b/dbt-postgres/src/dbt/adapters/postgres/relation_configs/__init__.py similarity index 100% rename from dbt/adapters/postgres/relation_configs/__init__.py rename to dbt-postgres/src/dbt/adapters/postgres/relation_configs/__init__.py diff --git a/dbt/adapters/postgres/relation_configs/constants.py b/dbt-postgres/src/dbt/adapters/postgres/relation_configs/constants.py similarity index 100% rename from dbt/adapters/postgres/relation_configs/constants.py rename to dbt-postgres/src/dbt/adapters/postgres/relation_configs/constants.py diff --git a/dbt/adapters/postgres/relation_configs/index.py b/dbt-postgres/src/dbt/adapters/postgres/relation_configs/index.py similarity index 100% rename from dbt/adapters/postgres/relation_configs/index.py rename to dbt-postgres/src/dbt/adapters/postgres/relation_configs/index.py diff --git a/dbt/adapters/postgres/relation_configs/materialized_view.py b/dbt-postgres/src/dbt/adapters/postgres/relation_configs/materialized_view.py similarity index 100% rename from dbt/adapters/postgres/relation_configs/materialized_view.py rename to dbt-postgres/src/dbt/adapters/postgres/relation_configs/materialized_view.py diff --git a/dbt/include/postgres/__init__.py b/dbt-postgres/src/dbt/include/postgres/__init__.py similarity index 100% rename from dbt/include/postgres/__init__.py rename to dbt-postgres/src/dbt/include/postgres/__init__.py diff --git a/dbt/include/postgres/dbt_project.yml b/dbt-postgres/src/dbt/include/postgres/dbt_project.yml similarity index 100% rename from dbt/include/postgres/dbt_project.yml rename to dbt-postgres/src/dbt/include/postgres/dbt_project.yml diff --git a/dbt/include/postgres/macros/adapters.sql b/dbt-postgres/src/dbt/include/postgres/macros/adapters.sql similarity index 100% rename from dbt/include/postgres/macros/adapters.sql rename to dbt-postgres/src/dbt/include/postgres/macros/adapters.sql diff --git a/dbt/include/postgres/macros/catalog.sql b/dbt-postgres/src/dbt/include/postgres/macros/catalog.sql similarity index 100% rename from dbt/include/postgres/macros/catalog.sql rename to dbt-postgres/src/dbt/include/postgres/macros/catalog.sql diff --git a/dbt/include/postgres/macros/materializations/incremental_strategies.sql b/dbt-postgres/src/dbt/include/postgres/macros/materializations/incremental_strategies.sql similarity index 100% rename from dbt/include/postgres/macros/materializations/incremental_strategies.sql rename to dbt-postgres/src/dbt/include/postgres/macros/materializations/incremental_strategies.sql diff --git a/dbt/include/postgres/macros/materializations/snapshot_merge.sql b/dbt-postgres/src/dbt/include/postgres/macros/materializations/snapshot_merge.sql similarity index 100% rename from dbt/include/postgres/macros/materializations/snapshot_merge.sql rename to dbt-postgres/src/dbt/include/postgres/macros/materializations/snapshot_merge.sql diff --git a/dbt/include/postgres/macros/relations.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations.sql similarity index 100% rename from dbt/include/postgres/macros/relations.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations.sql diff --git a/dbt/include/postgres/macros/relations/materialized_view/alter.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/alter.sql similarity index 100% rename from dbt/include/postgres/macros/relations/materialized_view/alter.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/alter.sql diff --git a/dbt/include/postgres/macros/relations/materialized_view/create.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/create.sql similarity index 100% rename from dbt/include/postgres/macros/relations/materialized_view/create.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/create.sql diff --git a/dbt/include/postgres/macros/relations/materialized_view/describe.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/describe.sql similarity index 100% rename from dbt/include/postgres/macros/relations/materialized_view/describe.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/describe.sql diff --git a/dbt/include/postgres/macros/relations/materialized_view/drop.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/drop.sql similarity index 100% rename from dbt/include/postgres/macros/relations/materialized_view/drop.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/drop.sql diff --git a/dbt/include/postgres/macros/relations/materialized_view/refresh.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/refresh.sql similarity index 100% rename from dbt/include/postgres/macros/relations/materialized_view/refresh.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/refresh.sql diff --git a/dbt/include/postgres/macros/relations/materialized_view/rename.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/rename.sql similarity index 100% rename from dbt/include/postgres/macros/relations/materialized_view/rename.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/materialized_view/rename.sql diff --git a/dbt/include/postgres/macros/relations/table/drop.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/table/drop.sql similarity index 100% rename from dbt/include/postgres/macros/relations/table/drop.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/table/drop.sql diff --git a/dbt/include/postgres/macros/relations/table/rename.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/table/rename.sql similarity index 100% rename from dbt/include/postgres/macros/relations/table/rename.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/table/rename.sql diff --git a/dbt/include/postgres/macros/relations/table/replace.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/table/replace.sql similarity index 100% rename from dbt/include/postgres/macros/relations/table/replace.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/table/replace.sql diff --git a/dbt/include/postgres/macros/relations/view/drop.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/view/drop.sql similarity index 100% rename from dbt/include/postgres/macros/relations/view/drop.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/view/drop.sql diff --git a/dbt/include/postgres/macros/relations/view/rename.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/view/rename.sql similarity index 100% rename from dbt/include/postgres/macros/relations/view/rename.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/view/rename.sql diff --git a/dbt/include/postgres/macros/relations/view/replace.sql b/dbt-postgres/src/dbt/include/postgres/macros/relations/view/replace.sql similarity index 100% rename from dbt/include/postgres/macros/relations/view/replace.sql rename to dbt-postgres/src/dbt/include/postgres/macros/relations/view/replace.sql diff --git a/dbt/include/postgres/macros/timestamps.sql b/dbt-postgres/src/dbt/include/postgres/macros/timestamps.sql similarity index 100% rename from dbt/include/postgres/macros/timestamps.sql rename to dbt-postgres/src/dbt/include/postgres/macros/timestamps.sql diff --git a/dbt/include/postgres/macros/utils/any_value.sql b/dbt-postgres/src/dbt/include/postgres/macros/utils/any_value.sql similarity index 100% rename from dbt/include/postgres/macros/utils/any_value.sql rename to dbt-postgres/src/dbt/include/postgres/macros/utils/any_value.sql diff --git a/tests/__init__.py b/dbt-postgres/src/dbt/include/postgres/macros/utils/columns_spec_ddl.sql similarity index 100% rename from tests/__init__.py rename to dbt-postgres/src/dbt/include/postgres/macros/utils/columns_spec_ddl.sql diff --git a/dbt/include/postgres/macros/utils/dateadd.sql b/dbt-postgres/src/dbt/include/postgres/macros/utils/dateadd.sql similarity index 100% rename from dbt/include/postgres/macros/utils/dateadd.sql rename to dbt-postgres/src/dbt/include/postgres/macros/utils/dateadd.sql diff --git a/dbt/include/postgres/macros/utils/datediff.sql b/dbt-postgres/src/dbt/include/postgres/macros/utils/datediff.sql similarity index 100% rename from dbt/include/postgres/macros/utils/datediff.sql rename to dbt-postgres/src/dbt/include/postgres/macros/utils/datediff.sql diff --git a/dbt/include/postgres/macros/utils/last_day.sql b/dbt-postgres/src/dbt/include/postgres/macros/utils/last_day.sql similarity index 100% rename from dbt/include/postgres/macros/utils/last_day.sql rename to dbt-postgres/src/dbt/include/postgres/macros/utils/last_day.sql diff --git a/dbt/include/postgres/macros/utils/listagg.sql b/dbt-postgres/src/dbt/include/postgres/macros/utils/listagg.sql similarity index 100% rename from dbt/include/postgres/macros/utils/listagg.sql rename to dbt-postgres/src/dbt/include/postgres/macros/utils/listagg.sql diff --git a/dbt/include/postgres/macros/utils/split_part.sql b/dbt-postgres/src/dbt/include/postgres/macros/utils/split_part.sql similarity index 100% rename from dbt/include/postgres/macros/utils/split_part.sql rename to dbt-postgres/src/dbt/include/postgres/macros/utils/split_part.sql diff --git a/dbt/include/postgres/profile_template.yml b/dbt-postgres/src/dbt/include/postgres/profile_template.yml similarity index 100% rename from dbt/include/postgres/profile_template.yml rename to dbt-postgres/src/dbt/include/postgres/profile_template.yml diff --git a/dbt/include/postgres/sample_profiles.yml b/dbt-postgres/src/dbt/include/postgres/sample_profiles.yml similarity index 100% rename from dbt/include/postgres/sample_profiles.yml rename to dbt-postgres/src/dbt/include/postgres/sample_profiles.yml diff --git a/tests/conftest.py b/dbt-postgres/tests/conftest.py similarity index 100% rename from tests/conftest.py rename to dbt-postgres/tests/conftest.py diff --git a/tests/functional/README.md b/dbt-postgres/tests/functional/README.md similarity index 100% rename from tests/functional/README.md rename to dbt-postgres/tests/functional/README.md diff --git a/tests/functional/__init__.py b/dbt-postgres/tests/functional/__init__.py similarity index 100% rename from tests/functional/__init__.py rename to dbt-postgres/tests/functional/__init__.py diff --git a/tests/functional/adapter/__init__.py b/dbt-postgres/tests/functional/adapter/__init__.py similarity index 100% rename from tests/functional/adapter/__init__.py rename to dbt-postgres/tests/functional/adapter/__init__.py diff --git a/tests/functional/adapter/test_aliases.py b/dbt-postgres/tests/functional/adapter/test_aliases.py similarity index 100% rename from tests/functional/adapter/test_aliases.py rename to dbt-postgres/tests/functional/adapter/test_aliases.py diff --git a/tests/functional/adapter/test_basic.py b/dbt-postgres/tests/functional/adapter/test_basic.py similarity index 100% rename from tests/functional/adapter/test_basic.py rename to dbt-postgres/tests/functional/adapter/test_basic.py diff --git a/tests/functional/adapter/test_caching.py b/dbt-postgres/tests/functional/adapter/test_caching.py similarity index 100% rename from tests/functional/adapter/test_caching.py rename to dbt-postgres/tests/functional/adapter/test_caching.py diff --git a/tests/functional/adapter/test_clone.py b/dbt-postgres/tests/functional/adapter/test_clone.py similarity index 100% rename from tests/functional/adapter/test_clone.py rename to dbt-postgres/tests/functional/adapter/test_clone.py diff --git a/tests/functional/adapter/test_column_types.py b/dbt-postgres/tests/functional/adapter/test_column_types.py similarity index 100% rename from tests/functional/adapter/test_column_types.py rename to dbt-postgres/tests/functional/adapter/test_column_types.py diff --git a/tests/functional/adapter/test_concurrency.py b/dbt-postgres/tests/functional/adapter/test_concurrency.py similarity index 100% rename from tests/functional/adapter/test_concurrency.py rename to dbt-postgres/tests/functional/adapter/test_concurrency.py diff --git a/tests/functional/adapter/test_constraints.py b/dbt-postgres/tests/functional/adapter/test_constraints.py similarity index 100% rename from tests/functional/adapter/test_constraints.py rename to dbt-postgres/tests/functional/adapter/test_constraints.py diff --git a/tests/functional/adapter/test_data_types.py b/dbt-postgres/tests/functional/adapter/test_data_types.py similarity index 100% rename from tests/functional/adapter/test_data_types.py rename to dbt-postgres/tests/functional/adapter/test_data_types.py diff --git a/tests/functional/adapter/test_debug.py b/dbt-postgres/tests/functional/adapter/test_debug.py similarity index 100% rename from tests/functional/adapter/test_debug.py rename to dbt-postgres/tests/functional/adapter/test_debug.py diff --git a/tests/functional/adapter/test_empty.py b/dbt-postgres/tests/functional/adapter/test_empty.py similarity index 100% rename from tests/functional/adapter/test_empty.py rename to dbt-postgres/tests/functional/adapter/test_empty.py diff --git a/tests/functional/adapter/test_ephemeral.py b/dbt-postgres/tests/functional/adapter/test_ephemeral.py similarity index 100% rename from tests/functional/adapter/test_ephemeral.py rename to dbt-postgres/tests/functional/adapter/test_ephemeral.py diff --git a/tests/functional/adapter/test_grants.py b/dbt-postgres/tests/functional/adapter/test_grants.py similarity index 100% rename from tests/functional/adapter/test_grants.py rename to dbt-postgres/tests/functional/adapter/test_grants.py diff --git a/tests/functional/adapter/test_hooks/data/seed_model.sql b/dbt-postgres/tests/functional/adapter/test_hooks/data/seed_model.sql similarity index 100% rename from tests/functional/adapter/test_hooks/data/seed_model.sql rename to dbt-postgres/tests/functional/adapter/test_hooks/data/seed_model.sql diff --git a/tests/functional/adapter/test_hooks/data/seed_run.sql b/dbt-postgres/tests/functional/adapter/test_hooks/data/seed_run.sql similarity index 100% rename from tests/functional/adapter/test_hooks/data/seed_run.sql rename to dbt-postgres/tests/functional/adapter/test_hooks/data/seed_run.sql diff --git a/tests/functional/adapter/test_hooks/test_hooks.py b/dbt-postgres/tests/functional/adapter/test_hooks/test_hooks.py similarity index 100% rename from tests/functional/adapter/test_hooks/test_hooks.py rename to dbt-postgres/tests/functional/adapter/test_hooks/test_hooks.py diff --git a/tests/functional/adapter/test_incremental.py b/dbt-postgres/tests/functional/adapter/test_incremental.py similarity index 100% rename from tests/functional/adapter/test_incremental.py rename to dbt-postgres/tests/functional/adapter/test_incremental.py diff --git a/tests/functional/adapter/test_incremental_microbatch.py b/dbt-postgres/tests/functional/adapter/test_incremental_microbatch.py similarity index 100% rename from tests/functional/adapter/test_incremental_microbatch.py rename to dbt-postgres/tests/functional/adapter/test_incremental_microbatch.py diff --git a/tests/functional/adapter/test_persist_docs.py b/dbt-postgres/tests/functional/adapter/test_persist_docs.py similarity index 100% rename from tests/functional/adapter/test_persist_docs.py rename to dbt-postgres/tests/functional/adapter/test_persist_docs.py diff --git a/tests/functional/adapter/test_query_comment.py b/dbt-postgres/tests/functional/adapter/test_query_comment.py similarity index 100% rename from tests/functional/adapter/test_query_comment.py rename to dbt-postgres/tests/functional/adapter/test_query_comment.py diff --git a/tests/functional/adapter/test_relations.py b/dbt-postgres/tests/functional/adapter/test_relations.py similarity index 100% rename from tests/functional/adapter/test_relations.py rename to dbt-postgres/tests/functional/adapter/test_relations.py diff --git a/tests/functional/adapter/test_show.py b/dbt-postgres/tests/functional/adapter/test_show.py similarity index 100% rename from tests/functional/adapter/test_show.py rename to dbt-postgres/tests/functional/adapter/test_show.py diff --git a/tests/functional/adapter/test_simple_copy.py b/dbt-postgres/tests/functional/adapter/test_simple_copy.py similarity index 100% rename from tests/functional/adapter/test_simple_copy.py rename to dbt-postgres/tests/functional/adapter/test_simple_copy.py diff --git a/tests/functional/adapter/test_simple_seed/seed_bom.csv b/dbt-postgres/tests/functional/adapter/test_simple_seed/seed_bom.csv similarity index 100% rename from tests/functional/adapter/test_simple_seed/seed_bom.csv rename to dbt-postgres/tests/functional/adapter/test_simple_seed/seed_bom.csv diff --git a/tests/functional/adapter/test_simple_seed/test_simple_seed.py b/dbt-postgres/tests/functional/adapter/test_simple_seed/test_simple_seed.py similarity index 100% rename from tests/functional/adapter/test_simple_seed/test_simple_seed.py rename to dbt-postgres/tests/functional/adapter/test_simple_seed/test_simple_seed.py diff --git a/tests/functional/adapter/test_simple_snapshot.py b/dbt-postgres/tests/functional/adapter/test_simple_snapshot.py similarity index 100% rename from tests/functional/adapter/test_simple_snapshot.py rename to dbt-postgres/tests/functional/adapter/test_simple_snapshot.py diff --git a/tests/functional/adapter/test_store_test_failures.py b/dbt-postgres/tests/functional/adapter/test_store_test_failures.py similarity index 100% rename from tests/functional/adapter/test_store_test_failures.py rename to dbt-postgres/tests/functional/adapter/test_store_test_failures.py diff --git a/tests/functional/adapter/test_unit_testing.py b/dbt-postgres/tests/functional/adapter/test_unit_testing.py similarity index 100% rename from tests/functional/adapter/test_unit_testing.py rename to dbt-postgres/tests/functional/adapter/test_unit_testing.py diff --git a/tests/functional/adapter/test_utils.py b/dbt-postgres/tests/functional/adapter/test_utils.py similarity index 100% rename from tests/functional/adapter/test_utils.py rename to dbt-postgres/tests/functional/adapter/test_utils.py diff --git a/tests/functional/basic/data/seed-initial.csv b/dbt-postgres/tests/functional/basic/data/seed-initial.csv similarity index 100% rename from tests/functional/basic/data/seed-initial.csv rename to dbt-postgres/tests/functional/basic/data/seed-initial.csv diff --git a/tests/functional/basic/data/seed-update.csv b/dbt-postgres/tests/functional/basic/data/seed-update.csv similarity index 100% rename from tests/functional/basic/data/seed-update.csv rename to dbt-postgres/tests/functional/basic/data/seed-update.csv diff --git a/tests/functional/basic/data/summary_expected.csv b/dbt-postgres/tests/functional/basic/data/summary_expected.csv similarity index 100% rename from tests/functional/basic/data/summary_expected.csv rename to dbt-postgres/tests/functional/basic/data/summary_expected.csv diff --git a/tests/functional/basic/data/summary_expected_update.csv b/dbt-postgres/tests/functional/basic/data/summary_expected_update.csv similarity index 100% rename from tests/functional/basic/data/summary_expected_update.csv rename to dbt-postgres/tests/functional/basic/data/summary_expected_update.csv diff --git a/tests/functional/basic/data/varchar10_seed.sql b/dbt-postgres/tests/functional/basic/data/varchar10_seed.sql similarity index 100% rename from tests/functional/basic/data/varchar10_seed.sql rename to dbt-postgres/tests/functional/basic/data/varchar10_seed.sql diff --git a/tests/functional/basic/data/varchar300_seed.sql b/dbt-postgres/tests/functional/basic/data/varchar300_seed.sql similarity index 100% rename from tests/functional/basic/data/varchar300_seed.sql rename to dbt-postgres/tests/functional/basic/data/varchar300_seed.sql diff --git a/tests/functional/basic/test_basic.py b/dbt-postgres/tests/functional/basic/test_basic.py similarity index 100% rename from tests/functional/basic/test_basic.py rename to dbt-postgres/tests/functional/basic/test_basic.py diff --git a/tests/functional/basic/test_invalid_reference.py b/dbt-postgres/tests/functional/basic/test_invalid_reference.py similarity index 100% rename from tests/functional/basic/test_invalid_reference.py rename to dbt-postgres/tests/functional/basic/test_invalid_reference.py diff --git a/tests/functional/basic/test_jaffle_shop.py b/dbt-postgres/tests/functional/basic/test_jaffle_shop.py similarity index 100% rename from tests/functional/basic/test_jaffle_shop.py rename to dbt-postgres/tests/functional/basic/test_jaffle_shop.py diff --git a/tests/functional/basic/test_mixed_case_db.py b/dbt-postgres/tests/functional/basic/test_mixed_case_db.py similarity index 100% rename from tests/functional/basic/test_mixed_case_db.py rename to dbt-postgres/tests/functional/basic/test_mixed_case_db.py diff --git a/tests/functional/basic/test_project.py b/dbt-postgres/tests/functional/basic/test_project.py similarity index 100% rename from tests/functional/basic/test_project.py rename to dbt-postgres/tests/functional/basic/test_project.py diff --git a/tests/functional/basic/test_simple_reference.py b/dbt-postgres/tests/functional/basic/test_simple_reference.py similarity index 100% rename from tests/functional/basic/test_simple_reference.py rename to dbt-postgres/tests/functional/basic/test_simple_reference.py diff --git a/tests/functional/basic/test_varchar_widening.py b/dbt-postgres/tests/functional/basic/test_varchar_widening.py similarity index 100% rename from tests/functional/basic/test_varchar_widening.py rename to dbt-postgres/tests/functional/basic/test_varchar_widening.py diff --git a/tests/functional/compile/fixtures.py b/dbt-postgres/tests/functional/compile/fixtures.py similarity index 100% rename from tests/functional/compile/fixtures.py rename to dbt-postgres/tests/functional/compile/fixtures.py diff --git a/tests/functional/compile/test_compile.py b/dbt-postgres/tests/functional/compile/test_compile.py similarity index 100% rename from tests/functional/compile/test_compile.py rename to dbt-postgres/tests/functional/compile/test_compile.py diff --git a/tests/functional/conftest.py b/dbt-postgres/tests/functional/conftest.py similarity index 100% rename from tests/functional/conftest.py rename to dbt-postgres/tests/functional/conftest.py diff --git a/tests/functional/contracts/test_contract_enforcement.py b/dbt-postgres/tests/functional/contracts/test_contract_enforcement.py similarity index 100% rename from tests/functional/contracts/test_contract_enforcement.py rename to dbt-postgres/tests/functional/contracts/test_contract_enforcement.py diff --git a/tests/functional/contracts/test_contract_precision.py b/dbt-postgres/tests/functional/contracts/test_contract_precision.py similarity index 100% rename from tests/functional/contracts/test_contract_precision.py rename to dbt-postgres/tests/functional/contracts/test_contract_precision.py diff --git a/tests/functional/contracts/test_nonstandard_data_type.py b/dbt-postgres/tests/functional/contracts/test_nonstandard_data_type.py similarity index 100% rename from tests/functional/contracts/test_nonstandard_data_type.py rename to dbt-postgres/tests/functional/contracts/test_nonstandard_data_type.py diff --git a/tests/functional/custom_aliases/fixtures.py b/dbt-postgres/tests/functional/custom_aliases/fixtures.py similarity index 100% rename from tests/functional/custom_aliases/fixtures.py rename to dbt-postgres/tests/functional/custom_aliases/fixtures.py diff --git a/tests/functional/custom_aliases/test_custom_aliases.py b/dbt-postgres/tests/functional/custom_aliases/test_custom_aliases.py similarity index 100% rename from tests/functional/custom_aliases/test_custom_aliases.py rename to dbt-postgres/tests/functional/custom_aliases/test_custom_aliases.py diff --git a/tests/functional/custom_singular_tests/data/seed_expected.sql b/dbt-postgres/tests/functional/custom_singular_tests/data/seed_expected.sql similarity index 100% rename from tests/functional/custom_singular_tests/data/seed_expected.sql rename to dbt-postgres/tests/functional/custom_singular_tests/data/seed_expected.sql diff --git a/tests/functional/custom_singular_tests/test_custom_singular_tests.py b/dbt-postgres/tests/functional/custom_singular_tests/test_custom_singular_tests.py similarity index 100% rename from tests/functional/custom_singular_tests/test_custom_singular_tests.py rename to dbt-postgres/tests/functional/custom_singular_tests/test_custom_singular_tests.py diff --git a/tests/functional/dbt_debug/test_dbt_debug.py b/dbt-postgres/tests/functional/dbt_debug/test_dbt_debug.py similarity index 100% rename from tests/functional/dbt_debug/test_dbt_debug.py rename to dbt-postgres/tests/functional/dbt_debug/test_dbt_debug.py diff --git a/tests/functional/dbt_runner.py b/dbt-postgres/tests/functional/dbt_runner.py similarity index 100% rename from tests/functional/dbt_runner.py rename to dbt-postgres/tests/functional/dbt_runner.py diff --git a/tests/functional/exit_codes/fixtures.py b/dbt-postgres/tests/functional/exit_codes/fixtures.py similarity index 100% rename from tests/functional/exit_codes/fixtures.py rename to dbt-postgres/tests/functional/exit_codes/fixtures.py diff --git a/tests/functional/exit_codes/test_exit_codes.py b/dbt-postgres/tests/functional/exit_codes/test_exit_codes.py similarity index 100% rename from tests/functional/exit_codes/test_exit_codes.py rename to dbt-postgres/tests/functional/exit_codes/test_exit_codes.py diff --git a/tests/functional/exposures/fixtures.py b/dbt-postgres/tests/functional/exposures/fixtures.py similarity index 100% rename from tests/functional/exposures/fixtures.py rename to dbt-postgres/tests/functional/exposures/fixtures.py diff --git a/tests/functional/exposures/test_exposure_configs.py b/dbt-postgres/tests/functional/exposures/test_exposure_configs.py similarity index 100% rename from tests/functional/exposures/test_exposure_configs.py rename to dbt-postgres/tests/functional/exposures/test_exposure_configs.py diff --git a/tests/functional/exposures/test_exposures.py b/dbt-postgres/tests/functional/exposures/test_exposures.py similarity index 100% rename from tests/functional/exposures/test_exposures.py rename to dbt-postgres/tests/functional/exposures/test_exposures.py diff --git a/tests/functional/graph_selection/test_graph_selection.py b/dbt-postgres/tests/functional/graph_selection/test_graph_selection.py similarity index 100% rename from tests/functional/graph_selection/test_graph_selection.py rename to dbt-postgres/tests/functional/graph_selection/test_graph_selection.py diff --git a/tests/functional/graph_selection/test_group_selection.py b/dbt-postgres/tests/functional/graph_selection/test_group_selection.py similarity index 100% rename from tests/functional/graph_selection/test_group_selection.py rename to dbt-postgres/tests/functional/graph_selection/test_group_selection.py diff --git a/tests/functional/graph_selection/test_intersection_syntax.py b/dbt-postgres/tests/functional/graph_selection/test_intersection_syntax.py similarity index 100% rename from tests/functional/graph_selection/test_intersection_syntax.py rename to dbt-postgres/tests/functional/graph_selection/test_intersection_syntax.py diff --git a/tests/functional/graph_selection/test_schema_test_graph_selection.py b/dbt-postgres/tests/functional/graph_selection/test_schema_test_graph_selection.py similarity index 100% rename from tests/functional/graph_selection/test_schema_test_graph_selection.py rename to dbt-postgres/tests/functional/graph_selection/test_schema_test_graph_selection.py diff --git a/tests/functional/graph_selection/test_tag_selection.py b/dbt-postgres/tests/functional/graph_selection/test_tag_selection.py similarity index 100% rename from tests/functional/graph_selection/test_tag_selection.py rename to dbt-postgres/tests/functional/graph_selection/test_tag_selection.py diff --git a/tests/functional/graph_selection/test_version_selection.py b/dbt-postgres/tests/functional/graph_selection/test_version_selection.py similarity index 100% rename from tests/functional/graph_selection/test_version_selection.py rename to dbt-postgres/tests/functional/graph_selection/test_version_selection.py diff --git a/tests/functional/incremental_schema_tests/fixtures.py b/dbt-postgres/tests/functional/incremental_schema_tests/fixtures.py similarity index 100% rename from tests/functional/incremental_schema_tests/fixtures.py rename to dbt-postgres/tests/functional/incremental_schema_tests/fixtures.py diff --git a/tests/functional/incremental_schema_tests/test_incremental_schema.py b/dbt-postgres/tests/functional/incremental_schema_tests/test_incremental_schema.py similarity index 100% rename from tests/functional/incremental_schema_tests/test_incremental_schema.py rename to dbt-postgres/tests/functional/incremental_schema_tests/test_incremental_schema.py diff --git a/tests/functional/invalid_model_tests/test_invalid_models.py b/dbt-postgres/tests/functional/invalid_model_tests/test_invalid_models.py similarity index 100% rename from tests/functional/invalid_model_tests/test_invalid_models.py rename to dbt-postgres/tests/functional/invalid_model_tests/test_invalid_models.py diff --git a/tests/functional/invalid_model_tests/test_model_warning.py b/dbt-postgres/tests/functional/invalid_model_tests/test_model_warning.py similarity index 100% rename from tests/functional/invalid_model_tests/test_model_warning.py rename to dbt-postgres/tests/functional/invalid_model_tests/test_model_warning.py diff --git a/tests/functional/macros/data/seed.sql b/dbt-postgres/tests/functional/macros/data/seed.sql similarity index 100% rename from tests/functional/macros/data/seed.sql rename to dbt-postgres/tests/functional/macros/data/seed.sql diff --git a/tests/functional/macros/fixtures.py b/dbt-postgres/tests/functional/macros/fixtures.py similarity index 100% rename from tests/functional/macros/fixtures.py rename to dbt-postgres/tests/functional/macros/fixtures.py diff --git a/tests/functional/macros/package_macro_overrides/dbt_project.yml b/dbt-postgres/tests/functional/macros/package_macro_overrides/dbt_project.yml similarity index 100% rename from tests/functional/macros/package_macro_overrides/dbt_project.yml rename to dbt-postgres/tests/functional/macros/package_macro_overrides/dbt_project.yml diff --git a/tests/functional/macros/package_macro_overrides/macros/macros.sql b/dbt-postgres/tests/functional/macros/package_macro_overrides/macros/macros.sql similarity index 100% rename from tests/functional/macros/package_macro_overrides/macros/macros.sql rename to dbt-postgres/tests/functional/macros/package_macro_overrides/macros/macros.sql diff --git a/tests/functional/macros/test_macros.py b/dbt-postgres/tests/functional/macros/test_macros.py similarity index 100% rename from tests/functional/macros/test_macros.py rename to dbt-postgres/tests/functional/macros/test_macros.py diff --git a/tests/functional/materializations/conftest.py b/dbt-postgres/tests/functional/materializations/conftest.py similarity index 100% rename from tests/functional/materializations/conftest.py rename to dbt-postgres/tests/functional/materializations/conftest.py diff --git a/tests/functional/materializations/fixtures.py b/dbt-postgres/tests/functional/materializations/fixtures.py similarity index 100% rename from tests/functional/materializations/fixtures.py rename to dbt-postgres/tests/functional/materializations/fixtures.py diff --git a/tests/functional/materializations/materialized_view_tests/test_materialized_view.py b/dbt-postgres/tests/functional/materializations/materialized_view_tests/test_materialized_view.py similarity index 100% rename from tests/functional/materializations/materialized_view_tests/test_materialized_view.py rename to dbt-postgres/tests/functional/materializations/materialized_view_tests/test_materialized_view.py diff --git a/tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py b/dbt-postgres/tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py similarity index 100% rename from tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py rename to dbt-postgres/tests/functional/materializations/materialized_view_tests/test_postgres_materialized_view.py diff --git a/tests/functional/materializations/materialized_view_tests/utils.py b/dbt-postgres/tests/functional/materializations/materialized_view_tests/utils.py similarity index 100% rename from tests/functional/materializations/materialized_view_tests/utils.py rename to dbt-postgres/tests/functional/materializations/materialized_view_tests/utils.py diff --git a/tests/functional/materializations/test_incremental.py b/dbt-postgres/tests/functional/materializations/test_incremental.py similarity index 100% rename from tests/functional/materializations/test_incremental.py rename to dbt-postgres/tests/functional/materializations/test_incremental.py diff --git a/tests/functional/materializations/test_runtime_materialization.py b/dbt-postgres/tests/functional/materializations/test_runtime_materialization.py similarity index 100% rename from tests/functional/materializations/test_runtime_materialization.py rename to dbt-postgres/tests/functional/materializations/test_runtime_materialization.py diff --git a/tests/functional/materializations/test_supported_languages.py b/dbt-postgres/tests/functional/materializations/test_supported_languages.py similarity index 100% rename from tests/functional/materializations/test_supported_languages.py rename to dbt-postgres/tests/functional/materializations/test_supported_languages.py diff --git a/tests/functional/postgres/fixtures.py b/dbt-postgres/tests/functional/postgres/fixtures.py similarity index 100% rename from tests/functional/postgres/fixtures.py rename to dbt-postgres/tests/functional/postgres/fixtures.py diff --git a/tests/functional/postgres/test_indexes.py b/dbt-postgres/tests/functional/postgres/test_indexes.py similarity index 100% rename from tests/functional/postgres/test_indexes.py rename to dbt-postgres/tests/functional/postgres/test_indexes.py diff --git a/tests/functional/projects/__init__.py b/dbt-postgres/tests/functional/projects/__init__.py similarity index 100% rename from tests/functional/projects/__init__.py rename to dbt-postgres/tests/functional/projects/__init__.py diff --git a/tests/functional/projects/dbt_integration/__init__.py b/dbt-postgres/tests/functional/projects/dbt_integration/__init__.py similarity index 100% rename from tests/functional/projects/dbt_integration/__init__.py rename to dbt-postgres/tests/functional/projects/dbt_integration/__init__.py diff --git a/tests/functional/projects/dbt_integration/macros/do_something.sql b/dbt-postgres/tests/functional/projects/dbt_integration/macros/do_something.sql similarity index 100% rename from tests/functional/projects/dbt_integration/macros/do_something.sql rename to dbt-postgres/tests/functional/projects/dbt_integration/macros/do_something.sql diff --git a/tests/functional/projects/dbt_integration/models/incremental.sql b/dbt-postgres/tests/functional/projects/dbt_integration/models/incremental.sql similarity index 100% rename from tests/functional/projects/dbt_integration/models/incremental.sql rename to dbt-postgres/tests/functional/projects/dbt_integration/models/incremental.sql diff --git a/tests/functional/projects/dbt_integration/models/table.sql b/dbt-postgres/tests/functional/projects/dbt_integration/models/table.sql similarity index 100% rename from tests/functional/projects/dbt_integration/models/table.sql rename to dbt-postgres/tests/functional/projects/dbt_integration/models/table.sql diff --git a/tests/functional/projects/dbt_integration/models/view.sql b/dbt-postgres/tests/functional/projects/dbt_integration/models/view.sql similarity index 100% rename from tests/functional/projects/dbt_integration/models/view.sql rename to dbt-postgres/tests/functional/projects/dbt_integration/models/view.sql diff --git a/tests/functional/projects/dbt_integration/schemas/project.yml b/dbt-postgres/tests/functional/projects/dbt_integration/schemas/project.yml similarity index 100% rename from tests/functional/projects/dbt_integration/schemas/project.yml rename to dbt-postgres/tests/functional/projects/dbt_integration/schemas/project.yml diff --git a/tests/functional/projects/dbt_integration/schemas/schema.yml b/dbt-postgres/tests/functional/projects/dbt_integration/schemas/schema.yml similarity index 100% rename from tests/functional/projects/dbt_integration/schemas/schema.yml rename to dbt-postgres/tests/functional/projects/dbt_integration/schemas/schema.yml diff --git a/tests/functional/projects/graph_selection/__init__.py b/dbt-postgres/tests/functional/projects/graph_selection/__init__.py similarity index 100% rename from tests/functional/projects/graph_selection/__init__.py rename to dbt-postgres/tests/functional/projects/graph_selection/__init__.py diff --git a/tests/functional/projects/graph_selection/data/seed.csv b/dbt-postgres/tests/functional/projects/graph_selection/data/seed.csv similarity index 100% rename from tests/functional/projects/graph_selection/data/seed.csv rename to dbt-postgres/tests/functional/projects/graph_selection/data/seed.csv diff --git a/tests/functional/projects/graph_selection/data/summary_expected.csv b/dbt-postgres/tests/functional/projects/graph_selection/data/summary_expected.csv similarity index 100% rename from tests/functional/projects/graph_selection/data/summary_expected.csv rename to dbt-postgres/tests/functional/projects/graph_selection/data/summary_expected.csv diff --git a/tests/functional/projects/graph_selection/models/alternative_users.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/alternative_users.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/alternative_users.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/alternative_users.sql diff --git a/tests/functional/projects/graph_selection/models/base_users.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/base_users.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/base_users.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/base_users.sql diff --git a/tests/functional/projects/graph_selection/models/emails.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/emails.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/emails.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/emails.sql diff --git a/tests/functional/projects/graph_selection/models/emails_alt.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/emails_alt.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/emails_alt.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/emails_alt.sql diff --git a/tests/functional/projects/graph_selection/models/nested_users.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/nested_users.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/nested_users.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/nested_users.sql diff --git a/tests/functional/projects/graph_selection/models/never_selected.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/never_selected.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/never_selected.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/never_selected.sql diff --git a/tests/functional/projects/graph_selection/models/subdir.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/subdir.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/subdir.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/subdir.sql diff --git a/tests/functional/projects/graph_selection/models/users.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/users.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/users.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/users.sql diff --git a/tests/functional/projects/graph_selection/models/users_rollup.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/users_rollup.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/users_rollup.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/users_rollup.sql diff --git a/tests/functional/projects/graph_selection/models/users_rollup_dependency.sql b/dbt-postgres/tests/functional/projects/graph_selection/models/users_rollup_dependency.sql similarity index 100% rename from tests/functional/projects/graph_selection/models/users_rollup_dependency.sql rename to dbt-postgres/tests/functional/projects/graph_selection/models/users_rollup_dependency.sql diff --git a/tests/functional/projects/graph_selection/schemas/patch_path_selection.yml b/dbt-postgres/tests/functional/projects/graph_selection/schemas/patch_path_selection.yml similarity index 100% rename from tests/functional/projects/graph_selection/schemas/patch_path_selection.yml rename to dbt-postgres/tests/functional/projects/graph_selection/schemas/patch_path_selection.yml diff --git a/tests/functional/projects/graph_selection/schemas/properties.yml b/dbt-postgres/tests/functional/projects/graph_selection/schemas/properties.yml similarity index 100% rename from tests/functional/projects/graph_selection/schemas/properties.yml rename to dbt-postgres/tests/functional/projects/graph_selection/schemas/properties.yml diff --git a/tests/functional/projects/graph_selection/schemas/schema.yml b/dbt-postgres/tests/functional/projects/graph_selection/schemas/schema.yml similarity index 100% rename from tests/functional/projects/graph_selection/schemas/schema.yml rename to dbt-postgres/tests/functional/projects/graph_selection/schemas/schema.yml diff --git a/tests/functional/projects/jaffle_shop/__init__.py b/dbt-postgres/tests/functional/projects/jaffle_shop/__init__.py similarity index 100% rename from tests/functional/projects/jaffle_shop/__init__.py rename to dbt-postgres/tests/functional/projects/jaffle_shop/__init__.py diff --git a/tests/functional/projects/jaffle_shop/data/raw_customers.csv b/dbt-postgres/tests/functional/projects/jaffle_shop/data/raw_customers.csv similarity index 100% rename from tests/functional/projects/jaffle_shop/data/raw_customers.csv rename to dbt-postgres/tests/functional/projects/jaffle_shop/data/raw_customers.csv diff --git a/tests/functional/projects/jaffle_shop/data/raw_orders.csv b/dbt-postgres/tests/functional/projects/jaffle_shop/data/raw_orders.csv similarity index 100% rename from tests/functional/projects/jaffle_shop/data/raw_orders.csv rename to dbt-postgres/tests/functional/projects/jaffle_shop/data/raw_orders.csv diff --git a/tests/functional/projects/jaffle_shop/data/raw_payments.csv b/dbt-postgres/tests/functional/projects/jaffle_shop/data/raw_payments.csv similarity index 100% rename from tests/functional/projects/jaffle_shop/data/raw_payments.csv rename to dbt-postgres/tests/functional/projects/jaffle_shop/data/raw_payments.csv diff --git a/tests/functional/projects/jaffle_shop/docs/docs.md b/dbt-postgres/tests/functional/projects/jaffle_shop/docs/docs.md similarity index 100% rename from tests/functional/projects/jaffle_shop/docs/docs.md rename to dbt-postgres/tests/functional/projects/jaffle_shop/docs/docs.md diff --git a/tests/functional/projects/jaffle_shop/docs/overview.md b/dbt-postgres/tests/functional/projects/jaffle_shop/docs/overview.md similarity index 100% rename from tests/functional/projects/jaffle_shop/docs/overview.md rename to dbt-postgres/tests/functional/projects/jaffle_shop/docs/overview.md diff --git a/tests/functional/projects/jaffle_shop/models/customers.sql b/dbt-postgres/tests/functional/projects/jaffle_shop/models/customers.sql similarity index 100% rename from tests/functional/projects/jaffle_shop/models/customers.sql rename to dbt-postgres/tests/functional/projects/jaffle_shop/models/customers.sql diff --git a/tests/functional/projects/jaffle_shop/models/orders.sql b/dbt-postgres/tests/functional/projects/jaffle_shop/models/orders.sql similarity index 100% rename from tests/functional/projects/jaffle_shop/models/orders.sql rename to dbt-postgres/tests/functional/projects/jaffle_shop/models/orders.sql diff --git a/tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml b/dbt-postgres/tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml similarity index 100% rename from tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml rename to dbt-postgres/tests/functional/projects/jaffle_shop/schemas/jaffle_shop.yml diff --git a/tests/functional/projects/jaffle_shop/schemas/staging.yml b/dbt-postgres/tests/functional/projects/jaffle_shop/schemas/staging.yml similarity index 100% rename from tests/functional/projects/jaffle_shop/schemas/staging.yml rename to dbt-postgres/tests/functional/projects/jaffle_shop/schemas/staging.yml diff --git a/tests/functional/projects/jaffle_shop/staging/stg_customers.sql b/dbt-postgres/tests/functional/projects/jaffle_shop/staging/stg_customers.sql similarity index 100% rename from tests/functional/projects/jaffle_shop/staging/stg_customers.sql rename to dbt-postgres/tests/functional/projects/jaffle_shop/staging/stg_customers.sql diff --git a/tests/functional/projects/jaffle_shop/staging/stg_orders.sql b/dbt-postgres/tests/functional/projects/jaffle_shop/staging/stg_orders.sql similarity index 100% rename from tests/functional/projects/jaffle_shop/staging/stg_orders.sql rename to dbt-postgres/tests/functional/projects/jaffle_shop/staging/stg_orders.sql diff --git a/tests/functional/projects/jaffle_shop/staging/stg_payments.sql b/dbt-postgres/tests/functional/projects/jaffle_shop/staging/stg_payments.sql similarity index 100% rename from tests/functional/projects/jaffle_shop/staging/stg_payments.sql rename to dbt-postgres/tests/functional/projects/jaffle_shop/staging/stg_payments.sql diff --git a/tests/functional/projects/utils.py b/dbt-postgres/tests/functional/projects/utils.py similarity index 100% rename from tests/functional/projects/utils.py rename to dbt-postgres/tests/functional/projects/utils.py diff --git a/tests/functional/retry/fixtures.py b/dbt-postgres/tests/functional/retry/fixtures.py similarity index 100% rename from tests/functional/retry/fixtures.py rename to dbt-postgres/tests/functional/retry/fixtures.py diff --git a/tests/functional/retry/test_retry.py b/dbt-postgres/tests/functional/retry/test_retry.py similarity index 100% rename from tests/functional/retry/test_retry.py rename to dbt-postgres/tests/functional/retry/test_retry.py diff --git a/tests/functional/schema/fixtures/macros.py b/dbt-postgres/tests/functional/schema/fixtures/macros.py similarity index 100% rename from tests/functional/schema/fixtures/macros.py rename to dbt-postgres/tests/functional/schema/fixtures/macros.py diff --git a/tests/functional/schema/fixtures/sql.py b/dbt-postgres/tests/functional/schema/fixtures/sql.py similarity index 100% rename from tests/functional/schema/fixtures/sql.py rename to dbt-postgres/tests/functional/schema/fixtures/sql.py diff --git a/tests/functional/schema/test_custom_schema.py b/dbt-postgres/tests/functional/schema/test_custom_schema.py similarity index 100% rename from tests/functional/schema/test_custom_schema.py rename to dbt-postgres/tests/functional/schema/test_custom_schema.py diff --git a/tests/functional/selected_resources/fixtures.py b/dbt-postgres/tests/functional/selected_resources/fixtures.py similarity index 100% rename from tests/functional/selected_resources/fixtures.py rename to dbt-postgres/tests/functional/selected_resources/fixtures.py diff --git a/tests/functional/selected_resources/test_selected_resources.py b/dbt-postgres/tests/functional/selected_resources/test_selected_resources.py similarity index 100% rename from tests/functional/selected_resources/test_selected_resources.py rename to dbt-postgres/tests/functional/selected_resources/test_selected_resources.py diff --git a/tests/functional/semantic_models/fixtures.py b/dbt-postgres/tests/functional/semantic_models/fixtures.py similarity index 100% rename from tests/functional/semantic_models/fixtures.py rename to dbt-postgres/tests/functional/semantic_models/fixtures.py diff --git a/tests/functional/semantic_models/test_semantic_model_configs.py b/dbt-postgres/tests/functional/semantic_models/test_semantic_model_configs.py similarity index 100% rename from tests/functional/semantic_models/test_semantic_model_configs.py rename to dbt-postgres/tests/functional/semantic_models/test_semantic_model_configs.py diff --git a/tests/functional/semantic_models/test_semantic_model_parsing.py b/dbt-postgres/tests/functional/semantic_models/test_semantic_model_parsing.py similarity index 100% rename from tests/functional/semantic_models/test_semantic_model_parsing.py rename to dbt-postgres/tests/functional/semantic_models/test_semantic_model_parsing.py diff --git a/tests/functional/semantic_models/test_semantic_models.py b/dbt-postgres/tests/functional/semantic_models/test_semantic_models.py similarity index 100% rename from tests/functional/semantic_models/test_semantic_models.py rename to dbt-postgres/tests/functional/semantic_models/test_semantic_models.py diff --git a/tests/functional/show/fixtures.py b/dbt-postgres/tests/functional/show/fixtures.py similarity index 100% rename from tests/functional/show/fixtures.py rename to dbt-postgres/tests/functional/show/fixtures.py diff --git a/tests/functional/show/test_show.py b/dbt-postgres/tests/functional/show/test_show.py similarity index 100% rename from tests/functional/show/test_show.py rename to dbt-postgres/tests/functional/show/test_show.py diff --git a/tests/functional/sources/common_source_setup.py b/dbt-postgres/tests/functional/sources/common_source_setup.py similarity index 100% rename from tests/functional/sources/common_source_setup.py rename to dbt-postgres/tests/functional/sources/common_source_setup.py diff --git a/tests/functional/sources/data/seed.sql b/dbt-postgres/tests/functional/sources/data/seed.sql similarity index 100% rename from tests/functional/sources/data/seed.sql rename to dbt-postgres/tests/functional/sources/data/seed.sql diff --git a/tests/functional/sources/fixtures.py b/dbt-postgres/tests/functional/sources/fixtures.py similarity index 100% rename from tests/functional/sources/fixtures.py rename to dbt-postgres/tests/functional/sources/fixtures.py diff --git a/tests/functional/sources/test_simple_source.py b/dbt-postgres/tests/functional/sources/test_simple_source.py similarity index 100% rename from tests/functional/sources/test_simple_source.py rename to dbt-postgres/tests/functional/sources/test_simple_source.py diff --git a/tests/functional/sources/test_source_configs.py b/dbt-postgres/tests/functional/sources/test_source_configs.py similarity index 100% rename from tests/functional/sources/test_source_configs.py rename to dbt-postgres/tests/functional/sources/test_source_configs.py diff --git a/tests/functional/sources/test_source_fresher_state.py b/dbt-postgres/tests/functional/sources/test_source_fresher_state.py similarity index 100% rename from tests/functional/sources/test_source_fresher_state.py rename to dbt-postgres/tests/functional/sources/test_source_fresher_state.py diff --git a/tests/functional/sources/test_source_freshness.py b/dbt-postgres/tests/functional/sources/test_source_freshness.py similarity index 100% rename from tests/functional/sources/test_source_freshness.py rename to dbt-postgres/tests/functional/sources/test_source_freshness.py diff --git a/tests/functional/statements/fixtures.py b/dbt-postgres/tests/functional/statements/fixtures.py similarity index 100% rename from tests/functional/statements/fixtures.py rename to dbt-postgres/tests/functional/statements/fixtures.py diff --git a/tests/functional/statements/test_statements.py b/dbt-postgres/tests/functional/statements/test_statements.py similarity index 100% rename from tests/functional/statements/test_statements.py rename to dbt-postgres/tests/functional/statements/test_statements.py diff --git a/tests/functional/test_access.py b/dbt-postgres/tests/functional/test_access.py similarity index 100% rename from tests/functional/test_access.py rename to dbt-postgres/tests/functional/test_access.py diff --git a/tests/functional/test_analyses.py b/dbt-postgres/tests/functional/test_analyses.py similarity index 100% rename from tests/functional/test_analyses.py rename to dbt-postgres/tests/functional/test_analyses.py diff --git a/tests/functional/test_catalog.py b/dbt-postgres/tests/functional/test_catalog.py similarity index 100% rename from tests/functional/test_catalog.py rename to dbt-postgres/tests/functional/test_catalog.py diff --git a/tests/functional/test_clean.py b/dbt-postgres/tests/functional/test_clean.py similarity index 100% rename from tests/functional/test_clean.py rename to dbt-postgres/tests/functional/test_clean.py diff --git a/tests/functional/test_colors.py b/dbt-postgres/tests/functional/test_colors.py similarity index 100% rename from tests/functional/test_colors.py rename to dbt-postgres/tests/functional/test_colors.py diff --git a/tests/functional/test_column_quotes.py b/dbt-postgres/tests/functional/test_column_quotes.py similarity index 100% rename from tests/functional/test_column_quotes.py rename to dbt-postgres/tests/functional/test_column_quotes.py diff --git a/tests/functional/test_config.py b/dbt-postgres/tests/functional/test_config.py similarity index 100% rename from tests/functional/test_config.py rename to dbt-postgres/tests/functional/test_config.py diff --git a/tests/functional/test_connection_manager.py b/dbt-postgres/tests/functional/test_connection_manager.py similarity index 100% rename from tests/functional/test_connection_manager.py rename to dbt-postgres/tests/functional/test_connection_manager.py diff --git a/tests/functional/test_custom_target_path.py b/dbt-postgres/tests/functional/test_custom_target_path.py similarity index 100% rename from tests/functional/test_custom_target_path.py rename to dbt-postgres/tests/functional/test_custom_target_path.py diff --git a/tests/functional/test_cycles.py b/dbt-postgres/tests/functional/test_cycles.py similarity index 100% rename from tests/functional/test_cycles.py rename to dbt-postgres/tests/functional/test_cycles.py diff --git a/tests/functional/test_default_selectors.py b/dbt-postgres/tests/functional/test_default_selectors.py similarity index 100% rename from tests/functional/test_default_selectors.py rename to dbt-postgres/tests/functional/test_default_selectors.py diff --git a/tests/functional/test_events.py b/dbt-postgres/tests/functional/test_events.py similarity index 100% rename from tests/functional/test_events.py rename to dbt-postgres/tests/functional/test_events.py diff --git a/tests/functional/test_external_reference.py b/dbt-postgres/tests/functional/test_external_reference.py similarity index 100% rename from tests/functional/test_external_reference.py rename to dbt-postgres/tests/functional/test_external_reference.py diff --git a/tests/functional/test_fail_fast.py b/dbt-postgres/tests/functional/test_fail_fast.py similarity index 100% rename from tests/functional/test_fail_fast.py rename to dbt-postgres/tests/functional/test_fail_fast.py diff --git a/tests/functional/test_multiple_indexes.py b/dbt-postgres/tests/functional/test_multiple_indexes.py similarity index 100% rename from tests/functional/test_multiple_indexes.py rename to dbt-postgres/tests/functional/test_multiple_indexes.py diff --git a/tests/functional/test_ref_override.py b/dbt-postgres/tests/functional/test_ref_override.py similarity index 100% rename from tests/functional/test_ref_override.py rename to dbt-postgres/tests/functional/test_ref_override.py diff --git a/tests/functional/test_relation_name.py b/dbt-postgres/tests/functional/test_relation_name.py similarity index 100% rename from tests/functional/test_relation_name.py rename to dbt-postgres/tests/functional/test_relation_name.py diff --git a/tests/functional/test_severity.py b/dbt-postgres/tests/functional/test_severity.py similarity index 100% rename from tests/functional/test_severity.py rename to dbt-postgres/tests/functional/test_severity.py diff --git a/tests/functional/test_store_test_failures.py b/dbt-postgres/tests/functional/test_store_test_failures.py similarity index 100% rename from tests/functional/test_store_test_failures.py rename to dbt-postgres/tests/functional/test_store_test_failures.py diff --git a/tests/functional/test_thread_count.py b/dbt-postgres/tests/functional/test_thread_count.py similarity index 100% rename from tests/functional/test_thread_count.py rename to dbt-postgres/tests/functional/test_thread_count.py diff --git a/tests/functional/test_timezones.py b/dbt-postgres/tests/functional/test_timezones.py similarity index 100% rename from tests/functional/test_timezones.py rename to dbt-postgres/tests/functional/test_timezones.py diff --git a/tests/functional/test_types.py b/dbt-postgres/tests/functional/test_types.py similarity index 100% rename from tests/functional/test_types.py rename to dbt-postgres/tests/functional/test_types.py diff --git a/tests/functional/test_unlogged_table.py b/dbt-postgres/tests/functional/test_unlogged_table.py similarity index 100% rename from tests/functional/test_unlogged_table.py rename to dbt-postgres/tests/functional/test_unlogged_table.py diff --git a/tests/functional/unit_testing/fixtures.py b/dbt-postgres/tests/functional/unit_testing/fixtures.py similarity index 100% rename from tests/functional/unit_testing/fixtures.py rename to dbt-postgres/tests/functional/unit_testing/fixtures.py diff --git a/tests/functional/unit_testing/test_csv_fixtures.py b/dbt-postgres/tests/functional/unit_testing/test_csv_fixtures.py similarity index 100% rename from tests/functional/unit_testing/test_csv_fixtures.py rename to dbt-postgres/tests/functional/unit_testing/test_csv_fixtures.py diff --git a/tests/functional/unit_testing/test_state.py b/dbt-postgres/tests/functional/unit_testing/test_state.py similarity index 100% rename from tests/functional/unit_testing/test_state.py rename to dbt-postgres/tests/functional/unit_testing/test_state.py diff --git a/tests/functional/unit_testing/test_unit_testing.py b/dbt-postgres/tests/functional/unit_testing/test_unit_testing.py similarity index 100% rename from tests/functional/unit_testing/test_unit_testing.py rename to dbt-postgres/tests/functional/unit_testing/test_unit_testing.py diff --git a/tests/functional/unit_testing/test_ut_dependency.py b/dbt-postgres/tests/functional/unit_testing/test_ut_dependency.py similarity index 100% rename from tests/functional/unit_testing/test_ut_dependency.py rename to dbt-postgres/tests/functional/unit_testing/test_ut_dependency.py diff --git a/tests/functional/unit_testing/test_ut_sources.py b/dbt-postgres/tests/functional/unit_testing/test_ut_sources.py similarity index 100% rename from tests/functional/unit_testing/test_ut_sources.py rename to dbt-postgres/tests/functional/unit_testing/test_ut_sources.py diff --git a/tests/functional/utils.py b/dbt-postgres/tests/functional/utils.py similarity index 100% rename from tests/functional/utils.py rename to dbt-postgres/tests/functional/utils.py diff --git a/tests/unit/test_adapter.py b/dbt-postgres/tests/unit/test_adapter.py similarity index 100% rename from tests/unit/test_adapter.py rename to dbt-postgres/tests/unit/test_adapter.py diff --git a/tests/unit/test_adapter_conversions.py b/dbt-postgres/tests/unit/test_adapter_conversions.py similarity index 100% rename from tests/unit/test_adapter_conversions.py rename to dbt-postgres/tests/unit/test_adapter_conversions.py diff --git a/tests/unit/test_connection.py b/dbt-postgres/tests/unit/test_connection.py similarity index 100% rename from tests/unit/test_connection.py rename to dbt-postgres/tests/unit/test_connection.py diff --git a/tests/unit/test_filter_catalog.py b/dbt-postgres/tests/unit/test_filter_catalog.py similarity index 100% rename from tests/unit/test_filter_catalog.py rename to dbt-postgres/tests/unit/test_filter_catalog.py diff --git a/tests/unit/test_materialized_view.py b/dbt-postgres/tests/unit/test_materialized_view.py similarity index 100% rename from tests/unit/test_materialized_view.py rename to dbt-postgres/tests/unit/test_materialized_view.py diff --git a/tests/unit/test_renamed_relations.py b/dbt-postgres/tests/unit/test_renamed_relations.py similarity index 100% rename from tests/unit/test_renamed_relations.py rename to dbt-postgres/tests/unit/test_renamed_relations.py diff --git a/tests/unit/utils.py b/dbt-postgres/tests/unit/utils.py similarity index 100% rename from tests/unit/utils.py rename to dbt-postgres/tests/unit/utils.py diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index 30dcb5056..000000000 --- a/pyproject.toml +++ /dev/null @@ -1,112 +0,0 @@ -[project] -dynamic = ["version"] -name = "dbt-postgres" -description = "The set of adapter protocols and base functionality that supports integration with dbt-core" -readme = "README.md" -keywords = ["dbt", "adapter", "adapters", "database", "elt", "dbt-core", "dbt Core", "dbt Cloud", "dbt Labs", "postgres"] -requires-python = ">=3.9.0" -authors = [ - { name = "dbt Labs", email = "info@dbtlabs.com" }, -] -maintainers = [ - { name = "dbt Labs", email = "info@dbtlabs.com" }, -] -classifiers = [ - "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: Apache Software License", - "Operating System :: MacOS :: MacOS X", - "Operating System :: Microsoft :: Windows", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", -] -dependencies = [ - "psycopg2-binary>=2.9,<3.0", - "dbt-adapters>=1.7.0,<2.0", - # add dbt-core to ensure backwards compatibility of installation, this is not a functional dependency - "dbt-core>=1.8.0", - # installed via dbt-adapters but used directly - "dbt-common>=1.0.4,<2.0", - "agate>=1.0,<2.0", -] -[project.urls] -Homepage = "https://github.com/dbt-labs/dbt-postgres" -Documentation = "https://docs.getdbt.com" -Repository = "https://github.com/dbt-labs/dbt-postgres.git" -Issues = "https://github.com/dbt-labs/dbt-postgres/issues" -Changelog = "https://github.com/dbt-labs/dbt-postgres/blob/main/CHANGELOG.md" - -[build-system] -requires = ["hatchling"] -build-backend = "hatchling.build" - -[tool.hatch.build.targets.sdist] -include = ["dbt"] - -[tool.hatch.build.targets.wheel] -packages = ["dbt"] - -[tool.hatch.version] -path = "dbt/adapters/postgres/__version__.py" - -[tool.hatch.envs.default] -python = "3.9" -dependencies = [ - "dbt-adapters @ git+https://github.com/dbt-labs/dbt-adapters.git", - "dbt-common @ git+https://github.com/dbt-labs/dbt-common.git", - "dbt-tests-adapter @ git+https://github.com/dbt-labs/dbt-adapters.git#subdirectory=dbt-tests-adapter", - "dbt-core @ git+https://github.com/dbt-labs/dbt-core.git#subdirectory=core", - "pre-commit==3.7.0", - "freezegun", - "pytest", - "pytest-dotenv", - "pytest-mock", - "pytest-xdist", -] -[tool.hatch.envs.default.env-vars] -DBT_TEST_USER_1 = "dbt_test_user_1" -DBT_TEST_USER_2 = "dbt_test_user_2" -DBT_TEST_USER_3 = "dbt_test_user_3" -[tool.hatch.envs.default.scripts] -setup = "pre-commit install" -code-quality = "pre-commit run --all-files" -unit-tests = "python -m pytest {args:tests/unit}" -integration-tests = "python -m pytest {args:tests/functional}" -docker-dev = [ - "echo Does not support integration testing, only development and unit testing. See issue https://github.com/dbt-labs/dbt-postgres/issues/99", - "docker build -f docker/dev.Dockerfile -t dbt-postgres-dev .", - "docker run --rm -it --name dbt-postgres-dev -v $(pwd):/opt/code dbt-postgres-dev", -] -docker-prod = "docker build -f docker/Dockerfile -t dbt-postgres ." - -[tool.hatch.envs.build] -detached = true -dependencies = [ - "wheel", - "twine", - "check-wheel-contents", -] -[tool.hatch.envs.build.scripts] -check-all = [ - "- check-wheel", - "- check-sdist", -] -check-wheel = [ - "twine check dist/*", - "find ./dist/dbt_postgres-*.whl -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", - "pip freeze | grep dbt-postgres", -] -check-sdist = [ - "check-wheel-contents dist/*.whl --ignore W007,W008", - "find ./dist/dbt_postgres-*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/", - "pip freeze | grep dbt-postgres", -] - -[tool.pytest] -env_files = ["test.env"] -testpaths = [ - "tests/functional", - "tests/unit", -]